Example #1
class StudyA(with_metaclass(StudyMetaClass, Study)):

    add_data_specs = [
        AcquiredFilesetSpec('x', text_format),
        AcquiredFilesetSpec('y', text_format),
        FilesetSpec('z', text_format, 'pipeline_alpha')
    ]

    add_param_specs = [
        ParameterSpec('o1', 1),
        ParameterSpec('o2', '2'),
        ParameterSpec('o3', 3.0)
    ]

    def pipeline_alpha(self, **name_maps):  # @UnusedVariable
        pipeline = self.pipeline(
            name='pipeline_alpha',
            desc="A dummy pipeline used to test MultiStudy class",
            references=[],
            name_maps=name_maps)
        math = pipeline.add("math", TestMath())
        math.inputs.op = 'add'
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('x', math, 'x')
        pipeline.connect_input('y', math, 'y')
        # Connect outputs
        pipeline.connect_output('z', math, 'z')
        return pipeline
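
The examples on this page mix two spellings of the same construct: with_metaclass(StudyMetaClass, Study), the six/Python 2 compatible form used above, and the Python 3 syntax metaclass=StudyMetaClass used from Example #2 onwards. A minimal sketch of the equivalence, with a toy metaclass standing in for StudyMetaClass:

from six import with_metaclass


class Meta(type):
    # Toy stand-in for StudyMetaClass
    def __new__(mcs, name, bases, dct):
        dct['created_by'] = mcs.__name__
        return super(Meta, mcs).__new__(mcs, name, bases, dct)


class Base(object):
    pass


# six/Python 2 compatible spelling (as in Example #1)
class StudyOld(with_metaclass(Meta, Base)):
    pass


# Python 3-only spelling (as in Example #2)
class StudyNew(Base, metaclass=Meta):
    pass


assert StudyOld.created_by == StudyNew.created_by == 'Meta'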
Example #2
class T2Study(MRIStudy, metaclass=StudyMetaClass):

    add_parameter_specs = [
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.5),
        ParameterSpec('bet_reduce_bias', False)
    ]
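
StudyMetaClass is what folds these add_*_specs class attributes into the full spec tables inherited from MRIStudy. A rough, self-contained illustration of that collection pattern (not the library's actual implementation; plain tuples stand in for ParameterSpec objects):

class CollectingMeta(type):
    # Illustrative only: gather 'add_param_specs' lists up the MRO,
    # letting subclass entries override base entries of the same name.
    def __new__(mcs, name, bases, dct):
        cls = super(CollectingMeta, mcs).__new__(mcs, name, bases, dct)
        specs = {}
        for klass in reversed(cls.__mro__):
            for spec in klass.__dict__.get('add_param_specs', ()):
                specs[spec[0]] = spec  # keyed by spec name
        cls.param_specs = specs
        return cls


class BaseStudy(metaclass=CollectingMeta):
    add_param_specs = [('bet_robust', False)]


class DerivedStudy(BaseStudy):
    add_param_specs = [('bet_robust', True), ('bet_f_threshold', 0.5)]


assert DerivedStudy.param_specs['bet_robust'] == ('bet_robust', True)
assert 'bet_f_threshold' in DerivedStudy.param_specs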
Example #3
class T1Study(MRIStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('fs_recon_all', freesurfer_recon_all_format,
                    'freesurfer_pipeline'),
        DatasetSpec('brain', nifti_gz_format, 'brain_extraction_pipeline')]

    add_parameter_specs = [
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.57),
        ParameterSpec('bet_g_threshold', -0.1)]
    
    add_switch_specs = [
        SwitchSpec('bet_method', 'optibet',
                   choices=MRIStudy.switch_spec('bet_method').choices)]

    def freesurfer_pipeline(self, **kwargs):
        """
        Segments grey matter, white matter and CSF from T1 images using
        SPM "NewSegment" function.

        NB: Default values come from the W2MHS toolbox
        """
        pipeline = self.create_pipeline(
            name='segmentation',
            inputs=[DatasetSpec('primary', nifti_gz_format)],
            outputs=[DatasetSpec('fs_recon_all',
                                 freesurfer_recon_all_format)],
            desc="Segment white/grey matter and csf",
            version=1,
            citations=copy(freesurfer_cites),
            **kwargs)
        # FS ReconAll node
        recon_all = pipeline.create_node(
            interface=ReconAll(), name='recon_all',
            requirements=[freesurfer_req], wall_time=2000)
        recon_all.inputs.directive = 'all'
        recon_all.inputs.openmp = self.runner.num_processes
        # Wrapper around os.path.join
        join = pipeline.create_node(interface=JoinPath(), name='join')
        pipeline.connect(recon_all, 'subjects_dir', join, 'dirname')
        pipeline.connect(recon_all, 'subject_id', join, 'filename')
        # Connect inputs/outputs
        pipeline.connect_input('primary', recon_all, 'T1_files')
        pipeline.connect_output('fs_recon_all', join, 'path')
        return pipeline

    def segmentation_pipeline(self, **kwargs):
        pipeline = super(T1Study, self).segmentation_pipeline(img_type=1,
                                                              **kwargs)
        return pipeline
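
The SwitchSpec above changes only the default bet_method to 'optibet' while re-reading the parent's choices, so the subclass cannot drift out of sync with MRIStudy. A generic restatement of that "same choices, new default" pattern (the choice values below are hypothetical):

class Switch(object):
    # Illustrative stand-in for SwitchSpec
    def __init__(self, name, default, choices):
        assert default in choices, 'default must be one of the choices'
        self.name, self.default, self.choices = name, default, choices


parent_bet = Switch('bet_method', 'fsl_bet', ('fsl_bet', 'optibet'))
# Re-use the parent's choices, change only the default (cf. T1Study)
child_bet = Switch('bet_method', 'optibet', parent_bet.choices)
assert child_bet.choices == parent_bet.choices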
Example #4
class MultiMultiStudy(with_metaclass(MultiStudyMetaClass, MultiStudy)):

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA),
        SubStudySpec('full', FullMultiStudy),
        SubStudySpec('partial', PartialMultiStudy)
    ]

    add_data_specs = [FilesetSpec('g', text_format, 'combined_pipeline')]

    add_param_specs = [ParameterSpec('combined_op', 'add')]

    def combined_pipeline(self, **name_maps):
        pipeline = self.pipeline(
            name='combined',
            desc=("A dummy pipeline used to test MultiMultiStudy class"),
            references=[],
            name_maps=name_maps)
        merge = pipeline.add("merge", Merge(3))
        math = pipeline.add("math", TestMath())
        math.inputs.op = self.parameter('combined_op')
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('ss1_z', merge, 'in1')
        pipeline.connect_input('full_e', merge, 'in2')
        pipeline.connect_input('partial_ss2_z', merge, 'in3')
        # Connect nodes
        pipeline.connect(merge, 'out', math, 'x')
        # Connect outputs
        pipeline.connect_output('g', math, 'z')
        return pipeline
Example #5
class StudyB(with_metaclass(StudyMetaClass, Study)):

    add_data_specs = [
        AcquiredFilesetSpec('w', text_format),
        AcquiredFilesetSpec('x', text_format),
        FilesetSpec('y', text_format, 'pipeline_beta'),
        FilesetSpec('z', text_format, 'pipeline_beta')
    ]

    add_param_specs = [
        ParameterSpec('o1', 10),
        ParameterSpec('o2', '20'),
        ParameterSpec('o3', 30.0),
        ParameterSpec('product_op', 'not-specified')
    ]  # Needs to be set to 'product' @IgnorePep8

    def pipeline_beta(self, **name_maps):  # @UnusedVariable
        pipeline = self.pipeline(
            name='pipeline_beta',
            desc="A dummy pipeline used to test MultiStudy class",
            references=[],
            name_maps=name_maps)
        add1 = pipeline.add("add1", TestMath())
        add2 = pipeline.add("add2", TestMath())
        prod = pipeline.add("product", TestMath())
        add1.inputs.op = 'add'
        add2.inputs.op = 'add'
        prod.inputs.op = self.parameter('product_op')
        add1.inputs.as_file = True
        add2.inputs.as_file = True
        prod.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('w', add1, 'x')
        pipeline.connect_input('x', add1, 'y')
        pipeline.connect_input('x', add2, 'x')
        # Connect nodes
        pipeline.connect(add1, 'z', add2, 'y')
        pipeline.connect(add1, 'z', prod, 'x')
        pipeline.connect(add2, 'z', prod, 'y')
        # Connect outputs
        pipeline.connect_output('y', add2, 'z')
        pipeline.connect_output('z', prod, 'z')
        return pipeline
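
Tracing pipeline_beta's connections: add1 computes w + x, add2 computes (w + x) + x, and the product node combines the two sums with whatever product_op resolves to. A plain-function restatement of that dataflow (TestMath itself operates on files; this only shows the arithmetic):

def pipeline_beta(w, x, product_op):
    # Mirrors the node wiring of StudyB.pipeline_beta
    ops = {'add': lambda a, b: a + b, 'product': lambda a, b: a * b}
    add1 = w + x                     # node 'add1'
    add2 = add1 + x                  # node 'add2'
    y = add2                         # output 'y'
    z = ops[product_op](add1, add2)  # node 'product' -> output 'z'
    return y, z


assert pipeline_beta(1, 2, 'product') == (5, 15)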
Example #6
class FullMultiStudy(with_metaclass(MultiStudyMetaClass, MultiStudy)):

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA, {
            'x': 'a',
            'y': 'b',
            'z': 'd',
            'o1': 'p1',
            'o2': 'p2',
            'o3': 'p3'
        }),
        SubStudySpec(
            'ss2', StudyB, {
                'w': 'b',
                'x': 'c',
                'y': 'e',
                'z': 'f',
                'o1': 'q1',
                'o2': 'q2',
                'o3': 'p3',
                'product_op': 'required_op'
            })
    ]

    add_data_specs = [
        AcquiredFilesetSpec('a', text_format),
        AcquiredFilesetSpec('b', text_format),
        AcquiredFilesetSpec('c', text_format),
        FilesetSpec('d', text_format, 'pipeline_alpha_trans'),
        FilesetSpec('e', text_format, 'pipeline_beta_trans'),
        FilesetSpec('f', text_format, 'pipeline_beta_trans')
    ]

    add_param_specs = [
        ParameterSpec('p1', 100),
        ParameterSpec('p2', '200'),
        ParameterSpec('p3', 300.0),
        ParameterSpec('q1', 150),
        ParameterSpec('q2', '250'),
        ParameterSpec('required_op', 'still-not-specified')
    ]

    pipeline_alpha_trans = MultiStudy.translate('ss1', 'pipeline_alpha')
    pipeline_beta_trans = MultiStudy.translate('ss2', 'pipeline_beta')
Example #7
class PartialMultiStudy(with_metaclass(MultiStudyMetaClass, MultiStudy)):

    add_sub_study_specs = [
        SubStudySpec('ss1', StudyA, {
            'x': 'a',
            'y': 'b',
            'o1': 'p1'
        }),
        SubStudySpec('ss2', StudyB, {
            'w': 'b',
            'x': 'c',
            'o1': 'p1'
        })
    ]

    add_data_specs = [
        AcquiredFilesetSpec('a', text_format),
        AcquiredFilesetSpec('b', text_format),
        AcquiredFilesetSpec('c', text_format)
    ]

    pipeline_alpha_trans = MultiStudy.translate('ss1', 'pipeline_alpha')

    add_param_specs = [ParameterSpec('p1', 1000)]
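
FullMultiStudy maps every sub-study spec to a top-level name ('x' -> 'a' and so on), whereas PartialMultiStudy maps only some of them; anything left unmapped stays reachable under the sub-study prefix, which is why Example #4 connects 'ss1_z' and 'partial_ss2_z'. A sketch of that resolution rule as the mappings above imply it (not the library's actual code):

def multi_study_name(sub_study, name_map, spec_name):
    # Explicitly mapped names resolve to their top-level alias;
    # unmapped names fall back to the sub-study prefix.
    try:
        return name_map[spec_name]                   # e.g. 'x' -> 'a'
    except KeyError:
        return '{}_{}'.format(sub_study, spec_name)  # e.g. 'z' -> 'ss1_z'


ss1_map = {'x': 'a', 'y': 'b', 'o1': 'p1'}
assert multi_study_name('ss1', ss1_map, 'x') == 'a'
assert multi_study_name('ss1', ss1_map, 'z') == 'ss1_z'  # cf. Example #4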
Example #8
class EPIStudy(MRIStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('coreg_ref_preproc', nifti_gz_format, optional=True),
        DatasetSpec('coreg_ref_wmseg', nifti_gz_format, optional=True),
        DatasetSpec('reverse_phase', nifti_gz_format, optional=True),
        DatasetSpec('field_map_mag', nifti_gz_format, optional=True),
        DatasetSpec('field_map_phase', nifti_gz_format, optional=True),
        DatasetSpec('moco', nifti_gz_format, 'intrascan_alignment_pipeline'),
        DatasetSpec('align_mats', directory_format,
                    'intrascan_alignment_pipeline'),
        DatasetSpec('moco_par', par_format, 'intrascan_alignment_pipeline'),
        FieldSpec('field_map_delta_te', float, 'field_map_time_info_pipeline')
    ]

    add_parameter_specs = [
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.2),
        ParameterSpec('bet_reduce_bias', False),
        ParameterSpec('fugue_echo_spacing', 0.000275)
    ]

    add_switch_specs = [
        SwitchSpec('linear_reg_method',
                   'epireg',
                   choices=('flirt', 'spm', 'ants', 'epireg'))
    ]

    def linear_coregistration_pipeline(self, **kwargs):
        if self.branch('linear_reg_method', 'epireg'):
            return self._epireg_linear_coregistration_pipeline(**kwargs)
        else:
            return super(EPIStudy,
                         self).linear_coregistration_pipeline(**kwargs)

    def _epireg_linear_coregistration_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='linear_coreg',
            inputs=[
                DatasetSpec('brain', nifti_gz_format),
                DatasetSpec('coreg_ref_brain', nifti_gz_format),
                DatasetSpec('coreg_ref_preproc', nifti_gz_format),
                DatasetSpec('coreg_ref_wmseg', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('coreg_brain', nifti_gz_format),
                DatasetSpec('coreg_matrix', text_matrix_format)
            ],
            desc=("Intra-subjects epi registration improved using white "
                  "matter boundaries."),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        epireg = pipeline.create_node(fsl.epi.EpiReg(),
                                      name='epireg',
                                      requirements=[fsl509_req])

        epireg.inputs.out_base = 'epireg2ref'
        pipeline.connect_input('brain', epireg, 'epi')
        pipeline.connect_input('coreg_ref_brain', epireg, 't1_brain')
        pipeline.connect_input('coreg_ref_preproc', epireg, 't1_head')
        pipeline.connect_input('coreg_ref_wmseg', epireg, 'wmseg')

        pipeline.connect_output('coreg_brain', epireg, 'out_file')
        pipeline.connect_output('coreg_matrix', epireg, 'epi2str_mat')
        return pipeline

    def intrascan_alignment_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='MCFLIRT_pipeline',
            inputs=[DatasetSpec('preproc', nifti_gz_format)],
            outputs=[
                DatasetSpec('moco', nifti_gz_format),
                DatasetSpec('align_mats', directory_format),
                DatasetSpec('moco_par', par_format)
            ],
            desc=("Intra-epi volumes alignment."),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        mcflirt = pipeline.create_node(fsl.MCFLIRT(),
                                       name='mcflirt',
                                       requirements=[fsl509_req])
        mcflirt.inputs.ref_vol = 0
        mcflirt.inputs.save_mats = True
        mcflirt.inputs.save_plots = True
        mcflirt.inputs.output_type = 'NIFTI_GZ'
        mcflirt.inputs.out_file = 'moco.nii.gz'
        pipeline.connect_input('preproc', mcflirt, 'in_file')
        pipeline.connect_output('moco', mcflirt, 'out_file')
        pipeline.connect_output('moco_par', mcflirt, 'par_file')

        merge = pipeline.create_node(MergeListMotionMat(), name='merge')
        pipeline.connect(mcflirt, 'mat_file', merge, 'file_list')
        pipeline.connect_output('align_mats', merge, 'out_dir')

        return pipeline

    def field_map_time_info_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='field_map_time_info_pipeline',
            inputs=[DatasetSpec('field_map_mag', dicom_format)],
            outputs=[FieldSpec('field_map_delta_te', float)],
            desc=("Pipeline to extract delta TE from field map "
                  "images, if provided"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        delta_te = pipeline.create_node(FieldMapTimeInfo(),
                                        name='extract_delta_te')
        pipeline.connect_input('field_map_mag', delta_te, 'fm_mag')
        pipeline.connect_output('field_map_delta_te', delta_te, 'delta_te')

        return pipeline

    def preproc_pipeline(self, **kwargs):

        if ('field_map_phase' in self.input_names
                and 'field_map_mag' in self.input_names):
            return self._fugue_pipeline(**kwargs)
        elif 'reverse_phase' in self.input_names:
            return self._topup_pipeline(**kwargs)
        else:
            return super(EPIStudy, self).preproc_pipeline(**kwargs)

    def _topup_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='preproc_pipeline',
            inputs=[
                DatasetSpec('primary', nifti_gz_format),
                DatasetSpec('reverse_phase', nifti_gz_format),
                FieldSpec('ped', str),
                FieldSpec('pe_angle', str)
            ],
            outputs=[DatasetSpec('preproc', nifti_gz_format)],
            desc=("Topup distortion correction pipeline"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        reorient_epi_in = pipeline.create_node(fsl.utils.Reorient2Std(),
                                               name='reorient_epi_in',
                                               requirements=[fsl509_req])
        pipeline.connect_input('primary', reorient_epi_in, 'in_file')

        reorient_epi_opposite = pipeline.create_node(
            fsl.utils.Reorient2Std(),
            name='reorient_epi_opposite',
            requirements=[fsl509_req])
        pipeline.connect_input('reverse_phase', reorient_epi_opposite,
                               'in_file')
        prep_dwi = pipeline.create_node(PrepareDWI(), name='prepare_dwi')
        prep_dwi.inputs.topup = True
        pipeline.connect_input('ped', prep_dwi, 'pe_dir')
        pipeline.connect_input('pe_angle', prep_dwi, 'ped_polarity')
        pipeline.connect(reorient_epi_in, 'out_file', prep_dwi, 'dwi')
        pipeline.connect(reorient_epi_opposite, 'out_file', prep_dwi, 'dwi1')
        ped = pipeline.create_node(GenTopupConfigFiles(), name='gen_config')
        pipeline.connect(prep_dwi, 'pe', ped, 'ped')
        merge_outputs = pipeline.create_node(merge_lists(2),
                                             name='merge_files')
        pipeline.connect(prep_dwi, 'main', merge_outputs, 'in1')
        pipeline.connect(prep_dwi, 'secondary', merge_outputs, 'in2')
        merge = pipeline.create_node(fsl_merge(),
                                     name='fsl_merge',
                                     requirements=[fsl509_req])
        merge.inputs.dimension = 't'
        pipeline.connect(merge_outputs, 'out', merge, 'in_files')
        topup = pipeline.create_node(TOPUP(),
                                     name='topup',
                                     requirements=[fsl509_req])
        pipeline.connect(merge, 'merged_file', topup, 'in_file')
        pipeline.connect(ped, 'config_file', topup, 'encoding_file')
        in_apply_tp = pipeline.create_node(merge_lists(1), name='in_apply_tp')
        pipeline.connect(reorient_epi_in, 'out_file', in_apply_tp, 'in1')
        apply_topup = pipeline.create_node(ApplyTOPUP(),
                                           name='applytopup',
                                           requirements=[fsl509_req])
        apply_topup.inputs.method = 'jac'
        apply_topup.inputs.in_index = [1]
        pipeline.connect(in_apply_tp, 'out', apply_topup, 'in_files')
        pipeline.connect(ped, 'apply_topup_config', apply_topup,
                         'encoding_file')
        pipeline.connect(topup, 'out_movpar', apply_topup, 'in_topup_movpar')
        pipeline.connect(topup, 'out_fieldcoef', apply_topup,
                         'in_topup_fieldcoef')

        pipeline.connect_output('preproc', apply_topup, 'out_corrected')
        return pipeline

    def _fugue_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='preproc_pipeline',
            inputs=[
                DatasetSpec('primary', nifti_gz_format),
                DatasetSpec('field_map_mag', nifti_gz_format),
                DatasetSpec('field_map_phase', nifti_gz_format),
                FieldSpec('field_map_delta_te', float)
            ],
            outputs=[DatasetSpec('preproc', nifti_gz_format)],
            desc=("Fugue distortion correction pipeline"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        reorient_epi_in = pipeline.create_node(fsl.utils.Reorient2Std(),
                                               name='reorient_epi_in',
                                               requirements=[fsl509_req])
        pipeline.connect_input('primary', reorient_epi_in, 'in_file')
        fm_mag_reorient = pipeline.create_node(fsl.utils.Reorient2Std(),
                                               name='reorient_fm_mag',
                                               requirements=[fsl509_req])
        pipeline.connect_input('field_map_mag', fm_mag_reorient, 'in_file')
        fm_phase_reorient = pipeline.create_node(fsl.utils.Reorient2Std(),
                                                 name='reorient_fm_phase',
                                                 requirements=[fsl509_req])
        pipeline.connect_input('field_map_phase', fm_phase_reorient, 'in_file')
        bet = pipeline.create_node(BET(),
                                   name="bet",
                                   wall_time=5,
                                   requirements=[fsl509_req])
        bet.inputs.robust = True
        pipeline.connect(fm_mag_reorient, 'out_file', bet, 'in_file')
        create_fmap = pipeline.create_node(PrepareFieldmap(),
                                           name="prepfmap",
                                           wall_time=5,
                                           requirements=[fsl509_req])
        #         create_fmap.inputs.delta_TE = 2.46
        pipeline.connect_input('field_map_delta_te', create_fmap, 'delta_TE')
        pipeline.connect(bet, "out_file", create_fmap, "in_magnitude")
        pipeline.connect(fm_phase_reorient, 'out_file', create_fmap,
                         'in_phase')

        fugue = pipeline.create_node(FUGUE(),
                                     name='fugue',
                                     wall_time=5,
                                     requirements=[fsl509_req])
        fugue.inputs.unwarp_direction = 'x'
        fugue.inputs.dwell_time = self.parameter('fugue_echo_spacing')
        fugue.inputs.unwarped_file = 'example_func.nii.gz'
        pipeline.connect(create_fmap, 'out_fieldmap', fugue, 'fmap_in_file')
        pipeline.connect(reorient_epi_in, 'out_file', fugue, 'in_file')
        pipeline.connect_output('preproc', fugue, 'unwarped_file')
        return pipeline

    def motion_mat_pipeline(self, **kwargs):

        inputs = [
            DatasetSpec('coreg_matrix', text_matrix_format),
            DatasetSpec('qform_mat', text_matrix_format)
        ]
        if 'reverse_phase' not in self.input_names:
            inputs.append(DatasetSpec('align_mats', directory_format))
        pipeline = self.create_pipeline(
            name='motion_mat_calculation',
            inputs=inputs,
            outputs=[DatasetSpec('motion_mats', motion_mats_format)],
            desc=("Motion matrices calculation"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mm = pipeline.create_node(MotionMatCalculation(), name='motion_mats')
        pipeline.connect_input('coreg_matrix', mm, 'reg_mat')
        pipeline.connect_input('qform_mat', mm, 'qform_mat')
        if 'reverse_phase' not in self.input_names:
            pipeline.connect_input('align_mats', mm, 'align_mats')
        pipeline.connect_output('motion_mats', mm, 'motion_mats')
        return pipeline
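
preproc_pipeline picks a distortion-correction strategy from whichever optional inputs were actually supplied: a magnitude/phase field map pair selects FUGUE, a reverse phase-encode acquisition selects TOPUP, and otherwise the base MRIStudy preprocessing runs. The dispatch reduces to a check on input names:

def choose_preproc(input_names):
    # Mirrors the branching in EPIStudy.preproc_pipeline
    names = set(input_names)
    if {'field_map_mag', 'field_map_phase'} <= names:
        return 'fugue'
    elif 'reverse_phase' in names:
        return 'topup'
    return 'default'


assert choose_preproc(['primary', 'field_map_mag',
                       'field_map_phase']) == 'fugue'
assert choose_preproc(['primary', 'reverse_phase']) == 'topup'
assert choose_preproc(['primary']) == 'default'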
Example #9
class ExampleStudy(with_metaclass(StudyMetaClass, Study)):

    add_data_specs = [
        AcquiredFilesetSpec('one', text_format),
        AcquiredFilesetSpec('ten', text_format),
        FilesetSpec('derived1_1', text_format, 'pipeline1'),
        FilesetSpec('derived1_2', text_format, 'pipeline1'),
        FilesetSpec('derived2', text_format, 'pipeline2'),
        FilesetSpec('derived3', text_format, 'pipeline3'),
        FilesetSpec('derived4', text_format, 'pipeline4'),
        FilesetSpec('subject_summary',
                    text_format,
                    'subject_summary_pipeline',
                    frequency='per_subject'),
        FilesetSpec('visit_summary',
                    text_format,
                    'visit_summary_pipeline',
                    frequency='per_visit'),
        FilesetSpec('project_summary',
                    text_format,
                    'project_summary_pipeline',
                    frequency='per_study'),
        FilesetSpec('subject_ids',
                    text_format,
                    'subject_ids_access_pipeline',
                    frequency='per_visit'),
        FilesetSpec('visit_ids',
                    text_format,
                    'visit_ids_access_pipeline',
                    frequency='per_subject')
    ]

    add_param_specs = [ParameterSpec('pipeline_parameter', False)]

    def pipeline1(self, **name_maps):
        pipeline = self.pipeline(
            name='pipeline1',
            desc="A dummy pipeline used to test 'run_pipeline' method",
            references=[],
            name_maps=name_maps)
        if not self.parameter('pipeline_parameter'):
            raise Exception("Pipeline parameter was not accessible")
        indent = pipeline.add("ident1", IdentityInterface(['file']))
        indent2 = pipeline.add("ident2", IdentityInterface(['file']))
        # Connect inputs
        pipeline.connect_input('one', indent, 'file')
        pipeline.connect_input('one', indent2, 'file')
        # Connect outputs
        pipeline.connect_output('derived1_1', indent, 'file')
        pipeline.connect_output('derived1_2', indent2, 'file')
        return pipeline

    def pipeline2(self, **name_maps):
        pipeline = self.pipeline(
            name='pipeline2',
            desc="A dummy pipeline used to test 'run_pipeline' method",
            references=[],
            name_maps=name_maps)
        if not self.parameter('pipeline_parameter'):
            raise Exception("Pipeline parameter was not cascaded down to "
                            "pipeline2")
        math = pipeline.add("math", TestMath())
        math.inputs.op = 'add'
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('one', math, 'x')
        pipeline.connect_input('derived1_1', math, 'y')
        # Connect outputs
        pipeline.connect_output('derived2', math, 'z')
        return pipeline

    def pipeline3(self, **name_maps):
        pipeline = self.pipeline(
            name='pipeline3',
            desc="A dummy pipeline used to test 'run_pipeline' method",
            references=[],
            name_maps=name_maps)
        ident = pipeline.add('ident', IdentityInterface(['file']))
        # Connect inputs
        pipeline.connect_input('derived2', ident, 'file')
        # Connect outputs
        pipeline.connect_output('derived3', ident, 'file')
        return pipeline

    def pipeline4(self, **name_maps):
        pipeline = self.pipeline(
            name='pipeline4',
            desc="A dummy pipeline used to test 'run_pipeline' method",
            references=[],
            name_maps=name_maps)
        math = pipeline.add("mrcat", TestMath())
        math.inputs.op = 'mul'
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('derived1_2', math, 'x')
        pipeline.connect_input('derived3', math, 'y')
        # Connect outputs
        pipeline.connect_output('derived4', math, 'z')
        return pipeline

    def visit_ids_access_pipeline(self, **name_maps):
        pipeline = self.pipeline(
            name='visit_ids_access',
            desc=("A dummy pipeline used to test access to 'session' IDs"),
            references=[],
            name_maps=name_maps)
        visits_to_file = pipeline.add('visits_to_file',
                                      IteratorToFile(),
                                      joinsource=self.VISIT_ID,
                                      joinfield='ids')
        pipeline.connect_input(self.VISIT_ID, visits_to_file, 'ids')
        pipeline.connect_input(self.SUBJECT_ID, visits_to_file, 'fixed_id')
        pipeline.connect_output('visit_ids', visits_to_file, 'out_file')
        return pipeline

    def subject_ids_access_pipeline(self, **name_maps):
        pipeline = self.pipeline(
            name='subject_ids_access',
            desc=("A dummy pipeline used to test access to 'subject' IDs"),
            references=[],
            name_maps=name_maps)
        subjects_to_file = pipeline.add('subjects_to_file',
                                        IteratorToFile(),
                                        joinfield='ids',
                                        joinsource=self.SUBJECT_ID)
        pipeline.connect_input(self.SUBJECT_ID, subjects_to_file, 'ids')
        pipeline.connect_input(self.VISIT_ID, subjects_to_file, 'fixed_id')
        pipeline.connect_output('subject_ids', subjects_to_file, 'out_file')
        return pipeline

    def subject_summary_pipeline(self, **name_maps):
        pipeline = self.pipeline(name="subject_summary",
                                 desc=("Test of project summary variables"),
                                 references=[],
                                 name_maps=name_maps)
        math = pipeline.add('math',
                            TestMath(),
                            joinfield='x',
                            joinsource=self.VISIT_ID)
        math.inputs.op = 'add'
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('one', math, 'x')
        # Connect outputs
        pipeline.connect_output('subject_summary', math, 'z')
        return pipeline

    def visit_summary_pipeline(self, **name_maps):
        pipeline = self.pipeline(name="visit_summary",
                                 desc=("Test of project summary variables"),
                                 references=[],
                                 name_maps=name_maps)
        math = pipeline.add('math',
                            TestMath(),
                            joinfield='x',
                            joinsource=self.SUBJECT_ID)
        math.inputs.op = 'add'
        math.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('one', math, 'x')
        # Connect outputs
        pipeline.connect_output('visit_summary', math, 'z')
        return pipeline

    def project_summary_pipeline(self, **name_maps):
        pipeline = self.pipeline(name="project_summary",
                                 desc=("Test of project summary variables"),
                                 references=[],
                                 name_maps=name_maps)
        math1 = pipeline.add('math1',
                             TestMath(),
                             joinfield='x',
                             joinsource=self.VISIT_ID)
        math2 = pipeline.add('math2',
                             TestMath(),
                             joinfield='x',
                             joinsource=self.SUBJECT_ID)
        math1.inputs.op = 'add'
        math2.inputs.op = 'add'
        math1.inputs.as_file = True
        math2.inputs.as_file = True
        # Connect inputs
        pipeline.connect_input('one', math1, 'x')
        pipeline.connect(math1, 'z', math2, 'x')
        # Connect outputs
        pipeline.connect_output('project_summary', math2, 'z')
        return pipeline
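
The three summary pipelines differ only in their joinsource: subject_summary joins over visits within each subject, visit_summary joins over subjects within each visit, and project_summary chains both joins. A plain-Python sketch of those groupings over (subject, visit) sessions:

from collections import defaultdict

sessions = {('sub1', 'v1'): 1, ('sub1', 'v2'): 1,
            ('sub2', 'v1'): 1, ('sub2', 'v2'): 1}

per_subject = defaultdict(int)  # join over VISIT_ID
per_visit = defaultdict(int)    # join over SUBJECT_ID
for (subj, visit), value in sessions.items():
    per_subject[subj] += value
    per_visit[visit] += value
per_study = sum(per_subject.values())  # join over visits, then subjects

assert per_subject == {'sub1': 2, 'sub2': 2}
assert per_visit == {'v1': 2, 'v2': 2}
assert per_study == 4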
Example #10
def create_fmri_study_class(name,
                            t1,
                            epis,
                            epi_number,
                            echo_spacing,
                            fm_mag=None,
                            fm_phase=None,
                            run_regression=False):

    inputs = []
    dct = {}
    data_specs = []
    parameter_specs = []
    output_files = []
    distortion_correction = False

    if fm_mag and fm_phase:
        logger.info('Both magnitude and phase field map images provided. EPI '
                    'distortion correction will be performed.')
        distortion_correction = True
    elif fm_mag or fm_phase:
        logger.info(
            'In order to perform EPI distortion correction both magnitude '
            'and phase field map images must be provided.')
    else:
        logger.info(
            'No field map image provided. Distortion correction will not '
            'be performed.')

    study_specs = [SubStudySpec('t1', T1Study)]
    ref_spec = {'t1_brain': 'coreg_ref_brain'}
    inputs.append(
        DatasetMatch('t1_primary', dicom_format, t1, is_regex=True, order=0))
    epi_refspec = ref_spec.copy()
    epi_refspec.update({
        't1_wm_seg': 'coreg_ref_wmseg',
        't1_preproc': 'coreg_ref_preproc',
        'train_data': 'train_data'
    })
    study_specs.append(SubStudySpec('epi_0', FunctionalMRIStudy, epi_refspec))
    if epi_number > 1:
        epi_refspec.update({
            't1_wm_seg': 'coreg_ref_wmseg',
            't1_preproc': 'coreg_ref_preproc',
            'train_data': 'train_data',
            'epi_0_coreg_to_atlas_warp': 'coreg_to_atlas_warp',
            'epi_0_coreg_to_atlas_mat': 'coreg_to_atlas_mat'
        })
        study_specs.extend(
            SubStudySpec('epi_{}'.format(i), FunctionalMRIStudy, epi_refspec)
            for i in range(1, epi_number))

    for i in range(epi_number):
        inputs.append(
            DatasetMatch('epi_{}_primary'.format(i),
                         dicom_format,
                         epis,
                         order=i,
                         is_regex=True))
        parameter_specs.append(
            ParameterSpec('epi_{}_fugue_echo_spacing'.format(i), echo_spacing))

    if distortion_correction:
        inputs.extend(
            DatasetMatch('epi_{}_field_map_mag'.format(i),
                         dicom_format,
                         fm_mag,
                         dicom_tags={IMAGE_TYPE_TAG: MAG_IMAGE_TYPE},
                         is_regex=True,
                         order=0) for i in range(epi_number))
        inputs.extend(
            DatasetMatch('epi_{}_field_map_phase'.format(i),
                         dicom_format,
                         fm_phase,
                         dicom_tags={IMAGE_TYPE_TAG: PHASE_IMAGE_TYPE},
                         is_regex=True,
                         order=0) for i in range(epi_number))
    if run_regression:
        output_files.extend('epi_{}_smoothed_ts'.format(i)
                            for i in range(epi_number))
    else:
        output_files.extend('epi_{}_fix_dir'.format(i)
                            for i in range(epi_number))

    dct['add_sub_study_specs'] = study_specs
    dct['add_data_specs'] = data_specs
    dct['add_parameter_specs'] = parameter_specs
    dct['__metaclass__'] = MultiStudyMetaClass
    return (MultiStudyMetaClass(name, (FunctionalMRIMixin, ),
                                dct), inputs, output_files)
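
The return statement builds the study class at runtime by calling the metaclass directly, which is the three-argument type() protocol: metaclass(name, bases, namespace) returns a new class. A generic demonstration of the same mechanism using type() itself (the spec values below are placeholders):

def make_study_class(name, n_epis):
    # Assemble a class dict, then create the class by calling type()
    # directly, as create_fmri_study_class does with MultiStudyMetaClass
    dct = {
        'add_parameter_specs': [
            ('epi_{}_fugue_echo_spacing'.format(i), 0.000275)
            for i in range(n_epis)
        ],
    }
    return type(name, (object,), dct)


FmriStudy = make_study_class('FmriStudy', 2)
assert FmriStudy.__name__ == 'FmriStudy'
assert len(FmriStudy.add_parameter_specs) == 2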
Example #11
class FunctionalMRIStudy(EPIStudy, metaclass=StudyMetaClass):

    add_parameter_specs = [
        ParameterSpec('component_threshold', 20),
        ParameterSpec('motion_reg', True),
        ParameterSpec('highpass', 0.01),
        ParameterSpec('brain_thresh_percent', 5),
        ParameterSpec('MNI_template',
                      os.path.join(atlas_path, 'MNI152_T1_2mm.nii.gz')),
        ParameterSpec(
            'MNI_template_mask',
            os.path.join(atlas_path, 'MNI152_T1_2mm_brain_mask.nii.gz')),
        ParameterSpec('group_ica_components', 15)
    ]

    add_data_specs = [
        DatasetSpec('train_data',
                    rfile_format,
                    optional=True,
                    frequency='per_project'),
        DatasetSpec('hand_label_noise', text_format,
                    'fix_preparation_pipeline'),
        DatasetSpec('labelled_components', text_format,
                    'fix_classification_pipeline'),
        DatasetSpec('cleaned_file', nifti_gz_format,
                    'fix_regression_pipeline'),
        DatasetSpec('filtered_data', nifti_gz_format,
                    'rsfMRI_filtering_pipeline'),
        DatasetSpec('mc_par', par_format, 'rsfMRI_filtering_pipeline'),
        DatasetSpec('melodic_ica', zip_format,
                    'single_subject_melodic_pipeline'),
        DatasetSpec('fix_dir', zip_format, 'fix_preparation_pipeline'),
        DatasetSpec('normalized_ts', nifti_gz_format,
                    'timeseries_normalization_to_atlas_pipeline'),
        DatasetSpec('smoothed_ts', nifti_gz_format, 'smoothing_pipeline')
    ]

    add_switch_specs = [
        SwitchSpec('linear_reg_method',
                   'ants',
                   choices=('flirt', 'spm', 'ants', 'epireg'))
    ]

    def rsfMRI_filtering_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='rsfMRI_filtering',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('coreg_ref_brain', nifti_gz_format),
                FieldSpec('tr', float)
            ],
            outputs=[
                DatasetSpec('filtered_data', nifti_gz_format),
                DatasetSpec('mc_par', par_format)
            ],
            desc=("Spatial and temporal rsfMRI filtering"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        afni_mc = pipeline.create_node(Volreg(),
                                       name='AFNI_MC',
                                       wall_time=5,
                                       requirements=[afni_req])
        afni_mc.inputs.zpad = 1
        afni_mc.inputs.out_file = 'rsfmri_mc.nii.gz'
        afni_mc.inputs.oned_file = 'prefiltered_func_data_mcf.par'
        pipeline.connect_input('preproc', afni_mc, 'in_file')

        filt = pipeline.create_node(Tproject(),
                                    name='Tproject',
                                    wall_time=5,
                                    requirements=[afni_req])
        filt.inputs.stopband = (0, 0.01)
        filt.inputs.polort = 3
        filt.inputs.blur = 3
        filt.inputs.out_file = 'filtered_func_data.nii.gz'
        pipeline.connect_input('tr', filt, 'delta_t')
        pipeline.connect(afni_mc, 'out_file', filt, 'in_file')
        pipeline.connect_input('brain_mask', filt, 'mask')

        meanfunc = pipeline.create_node(ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                                        name='meanfunc',
                                        wall_time=5,
                                        requirements=[fsl5_req])
        pipeline.connect(afni_mc, 'out_file', meanfunc, 'in_file')

        add_mean = pipeline.create_node(ImageMaths(op_string='-add'),
                                        name='add_mean',
                                        wall_time=5,
                                        requirements=[fsl5_req])
        pipeline.connect(filt, 'out_file', add_mean, 'in_file')
        pipeline.connect(meanfunc, 'out_file', add_mean, 'in_file2')

        pipeline.connect_output('filtered_data', add_mean, 'out_file')
        pipeline.connect_output('mc_par', afni_mc, 'oned_file')

        return pipeline

    def single_subject_melodic_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='MelodicL1',
            inputs=[
                DatasetSpec('filtered_data', nifti_gz_format),
                FieldSpec('tr', float),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[DatasetSpec('melodic_ica', directory_format)],
            desc=("Single subject ICA analysis using FSL MELODIC."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mel = pipeline.create_node(MELODIC(),
                                   name='melodic_L1',
                                   wall_time=15,
                                   requirements=[fsl5_req])
        mel.inputs.no_bet = True
        pipeline.connect_input('brain_mask', mel, 'mask')
        mel.inputs.bg_threshold = self.parameter('brain_thresh_percent')
        mel.inputs.report = True
        mel.inputs.out_stats = True
        mel.inputs.mm_thresh = 0.5
        mel.inputs.out_dir = 'melodic_ica'
        pipeline.connect_input('tr', mel, 'tr_sec')
        pipeline.connect_input('filtered_data', mel, 'in_files')

        pipeline.connect_output('melodic_ica', mel, 'out_dir')

        return pipeline

    def fix_preparation_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='prepare_fix',
            inputs=[
                DatasetSpec('melodic_ica', directory_format),
                DatasetSpec('filtered_data', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_mat', text_matrix_format),
                DatasetSpec('coreg_matrix', text_matrix_format),
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain', nifti_gz_format),
                DatasetSpec('coreg_ref_brain', nifti_gz_format),
                DatasetSpec('mc_par', par_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('fix_dir', directory_format),
                DatasetSpec('hand_label_noise', text_format)
            ],
            desc=("Pipeline to create the right folder structure before "
                  "running FIX"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        struct_ants2fsl = pipeline.create_node(ANTs2FSLMatrixConversion(),
                                               name='struct_ants2fsl',
                                               requirements=[c3d_req])
        struct_ants2fsl.inputs.ras2fsl = True
        struct_ants2fsl.inputs.reference_file = self.parameter('MNI_template')
        pipeline.connect_input('coreg_to_atlas_mat', struct_ants2fsl,
                               'itk_file')
        pipeline.connect_input('coreg_ref_brain', struct_ants2fsl,
                               'source_file')
        epi_ants2fsl = pipeline.create_node(ANTs2FSLMatrixConversion(),
                                            name='epi_ants2fsl',
                                            requirements=[c3d_req])
        epi_ants2fsl.inputs.ras2fsl = True
        pipeline.connect_input('brain', epi_ants2fsl, 'source_file')
        pipeline.connect_input('coreg_matrix', epi_ants2fsl, 'itk_file')
        pipeline.connect_input('coreg_ref_brain', epi_ants2fsl,
                               'reference_file')

        MNI2t1 = pipeline.create_node(ConvertXFM(),
                                      name='MNI2t1',
                                      wall_time=5,
                                      requirements=[fsl509_req])
        MNI2t1.inputs.invert_xfm = True
        pipeline.connect(struct_ants2fsl, 'fsl_matrix', MNI2t1, 'in_file')

        struct2epi = pipeline.create_node(ConvertXFM(),
                                          name='struct2epi',
                                          wall_time=5,
                                          requirements=[fsl509_req])
        struct2epi.inputs.invert_xfm = True
        pipeline.connect(epi_ants2fsl, 'fsl_matrix', struct2epi, 'in_file')

        meanfunc = pipeline.create_node(ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                                        name='meanfunc',
                                        wall_time=5,
                                        requirements=[fsl509_req])
        pipeline.connect_input('preproc', meanfunc, 'in_file')

        prep_fix = pipeline.create_node(PrepareFIX(), name='prep_fix')
        pipeline.connect_input('melodic_ica', prep_fix, 'melodic_dir')
        pipeline.connect_input('coreg_ref_brain', prep_fix, 't1_brain')
        pipeline.connect_input('mc_par', prep_fix, 'mc_par')
        pipeline.connect_input('brain_mask', prep_fix, 'epi_brain_mask')
        pipeline.connect_input('preproc', prep_fix, 'epi_preproc')
        pipeline.connect_input('filtered_data', prep_fix, 'filtered_epi')
        pipeline.connect(epi_ants2fsl, 'fsl_matrix', prep_fix, 'epi2t1_mat')
        pipeline.connect(struct_ants2fsl, 'fsl_matrix', prep_fix, 't12MNI_mat')
        pipeline.connect(MNI2t1, 'out_file', prep_fix, 'MNI2t1_mat')
        pipeline.connect(struct2epi, 'out_file', prep_fix, 't12epi_mat')
        pipeline.connect(meanfunc, 'out_file', prep_fix, 'epi_mean')

        pipeline.connect_output('fix_dir', prep_fix, 'fix_dir')
        pipeline.connect_output('hand_label_noise', prep_fix,
                                'hand_label_file')

        return pipeline

    def fix_classification_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='fix_classification',
            inputs=[
                DatasetSpec('train_data',
                            rfile_format,
                            frequency='per_project'),
                DatasetSpec('fix_dir', directory_format)
            ],
            outputs=[DatasetSpec('labelled_components', text_format)],
            desc=("Automatic classification of noisy components from the "
                  "rsfMRI data using fsl FIX."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        fix = pipeline.create_node(FSLFIX(),
                                   name="fix",
                                   wall_time=30,
                                   requirements=[fsl509_req, fix_req])
        pipeline.connect_input("fix_dir", fix, "feat_dir")
        pipeline.connect_input("train_data", fix, "train_data")
        fix.inputs.component_threshold = self.parameter('component_threshold')
        fix.inputs.motion_reg = self.parameter('motion_reg')
        fix.inputs.classification = True

        pipeline.connect_output('labelled_components', fix, 'label_file')

        return pipeline

    def fix_regression_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='signal_regression',
            inputs=[
                DatasetSpec('fix_dir', directory_format),
                DatasetSpec('labelled_components', text_format)
            ],
            outputs=[DatasetSpec('cleaned_file', nifti_gz_format)],
            desc=("Regression of the noisy components from the rsfMRI data "
                  "using a python implementation equivalent to that in FIX."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        signal_reg = pipeline.create_node(SignalRegression(),
                                          name="signal_reg",
                                          wall_time=30,
                                          requirements=[fsl509_req, fix_req])
        pipeline.connect_input("fix_dir", signal_reg, "fix_dir")
        pipeline.connect_input("labelled_components", signal_reg,
                               "labelled_components")
        signal_reg.inputs.motion_regression = self.parameter('motion_reg')
        signal_reg.inputs.highpass = self.parameter('highpass')

        pipeline.connect_output('cleaned_file', signal_reg, 'output')

        return pipeline

    def timeseries_normalization_to_atlas_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='timeseries_normalization_to_atlas_pipeline',
            inputs=[
                DatasetSpec('cleaned_file', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_warp', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_mat', text_matrix_format),
                DatasetSpec('coreg_matrix', text_matrix_format)
            ],
            outputs=[DatasetSpec('normalized_ts', nifti_gz_format)],
            desc=("Apply ANTs transformation to the fmri filtered file to "
                  "normalize it to MNI 2mm."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        merge_trans = pipeline.create_node(NiPypeMerge(3),
                                           name='merge_transforms',
                                           wall_time=1)
        pipeline.connect_input('coreg_to_atlas_warp', merge_trans, 'in1')
        pipeline.connect_input('coreg_to_atlas_mat', merge_trans, 'in2')
        pipeline.connect_input('coreg_matrix', merge_trans, 'in3')

        apply_trans = pipeline.create_node(ApplyTransforms(),
                                           name='ApplyTransform',
                                           wall_time=7,
                                           memory=24000,
                                           requirements=[ants2_req])
        ref_brain = self.parameter('MNI_template')
        apply_trans.inputs.reference_image = ref_brain
        apply_trans.inputs.interpolation = 'Linear'
        apply_trans.inputs.input_image_type = 3
        pipeline.connect(merge_trans, 'out', apply_trans, 'transforms')
        pipeline.connect_input('cleaned_file', apply_trans, 'input_image')

        pipeline.connect_output('normalized_ts', apply_trans, 'output_image')

        return pipeline

    def smoothing_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='smoothing_pipeline',
            inputs=[DatasetSpec('normalized_ts', nifti_gz_format)],
            outputs=[DatasetSpec('smoothed_ts', nifti_gz_format)],
            desc=("Spatial smoothing of the normalized fmri file"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        smooth = pipeline.create_node(BlurToFWHM(),
                                      name='3dBlurToFWHM',
                                      wall_time=5,
                                      requirements=[afni_req])
        smooth.inputs.fwhm = 5
        smooth.inputs.out_file = 'smoothed_ts.nii.gz'
        smooth.inputs.mask = self.parameter('MNI_template_mask')
        pipeline.connect_input('normalized_ts', smooth, 'in_file')

        pipeline.connect_output('smoothed_ts', smooth, 'out_file')

        return pipeline
Example #12
class DynamicPETStudy(PETStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('pet_volumes', nifti_gz_format),
        DatasetSpec('regression_map', nifti_gz_format),
        DatasetSpec('pet_image', nifti_gz_format,
                    'Extract_vol_pipeline'),
        DatasetSpec('registered_volumes', nifti_gz_format,
                    'ApplyTransform_pipeline'),
        DatasetSpec('detrended_volumes', nifti_gz_format,
                    'Baseline_Removal_pipeline'),
        DatasetSpec('spatial_map', nifti_gz_format,
                    'Dual_Regression_pipeline'),
        DatasetSpec('ts', png_format, 'Dual_Regression_pipeline')]

    add_parameter_specs = [
        ParameterSpec('trans_template',
                      os.path.join(template_path, 'PET_template.nii.gz')),
        ParameterSpec('base_remove_th', 0),
        ParameterSpec('base_remove_binarize', False),
        ParameterSpec('regress_th', 0),
        ParameterSpec('regress_binarize', False)]

    def Extract_vol_pipeline(self, **kwargs):
        pipeline = self.create_pipeline(
            name='Extract_volume',
            inputs=[DatasetSpec('pet_volumes', nifti_gz_format)],
            outputs=[DatasetSpec('pet_image', nifti_gz_format)],
            desc=('Extract the last volume of the 4D PET timeseries'),
            version=1,
            citations=[],
            **kwargs)

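        # NB: t_min=79 hardcodes an 80-volume acquisition, so "the last
        # volume" in the description only holds for that series length.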
        fslroi = pipeline.create_node(
            ExtractROI(roi_file='vol.nii.gz', t_min=79, t_size=1),
            name='fslroi')
        pipeline.connect_input('pet_volumes', fslroi, 'in_file')
        pipeline.connect_output('pet_image', fslroi, 'roi_file')
        return pipeline

    def ApplyTransform_pipeline(self, **kwargs):
        pipeline = self.create_pipeline(
            name='applytransform',
            inputs=[DatasetSpec('pet_volumes', nifti_gz_format),
                    DatasetSpec('warp_file', nifti_gz_format),
                    DatasetSpec('affine_mat', text_matrix_format)],
            outputs=[DatasetSpec('registered_volumes', nifti_gz_format)],
            desc=('Apply transformation to the 4D PET timeseries'),
            version=1,
            citations=[],
            **kwargs)

        merge_trans = pipeline.create_node(Merge(2), name='merge_transforms')
        pipeline.connect_input('warp_file', merge_trans, 'in1')
        pipeline.connect_input('affine_mat', merge_trans, 'in2')

        apply_trans = pipeline.create_node(
            ApplyTransforms(), name='ApplyTransform')
        apply_trans.inputs.reference_image = self.parameter(
            'trans_template')
        apply_trans.inputs.interpolation = 'Linear'
        apply_trans.inputs.input_image_type = 3
        pipeline.connect(merge_trans, 'out', apply_trans, 'transforms')
        pipeline.connect_input('pet_volumes', apply_trans, 'input_image')

        pipeline.connect_output('registered_volumes', apply_trans,
                                'output_image')
        return pipeline

    def Baseline_Removal_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='Baseline_removal',
            inputs=[DatasetSpec('registered_volumes', nifti_gz_format)],
            outputs=[DatasetSpec('detrended_volumes', nifti_gz_format)],
            desc=('PET baseline (global trend) removal'),
            citations=[],
            version=1,
            **kwargs)

        br = pipeline.create_node(GlobalTrendRemoval(),
                                  name='Baseline_removal')
        pipeline.connect_input('registered_volumes', br, 'volume')
        pipeline.connect_output('detrended_volumes', br, 'detrended_file')
        return pipeline

    def Dual_Regression_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='Dual_regression',
            inputs=[DatasetSpec('detrended_volumes', nifti_gz_format),
                    DatasetSpec('regression_map', nifti_gz_format)],
            outputs=[DatasetSpec('spatial_map', nifti_gz_format),
                     DatasetSpec('ts', png_format)],
            desc=('PET dual regression'),
            citations=[],
            version=1,
            **kwargs)

        dr = pipeline.create_node(PETdr(), name='PET_dr')
        dr.inputs.threshold = self.parameter('regress_th')
        dr.inputs.binarize = self.parameter('regress_binarize')
        pipeline.connect_input('detrended_volumes', dr, 'volume')
        pipeline.connect_input('regression_map', dr, 'regression_map')

        pipeline.connect_output('spatial_map', dr, 'spatial_map')
        pipeline.connect_output('ts', dr, 'timecourse')
        return pipeline
#     def example_pipeline_switch(self, tool='atool', **kwargs):
#         if tool == 'atool':
#             pipeline = self._atool_pipeline(**kwargs)
#         else:
#             pipeline = self._anothertool_pipeline(**kwargs)
#         return pipeline

    def dynamics_ica_pipeline(self, **kwargs):
        return self._ICA_pipeline_factory(
            input_dataset=DatasetSpec('registered_volumes', nifti_gz_format),
            **kwargs)
Example #13
class NODDIStudy(DiffusionStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('low_b_dw_scan', mrtrix_format),
        DatasetSpec('high_b_dw_scan', mrtrix_format),
        DatasetSpec('forward_pe', mrtrix_format),
        DatasetSpec('reverse_pe', mrtrix_format),
        DatasetSpec('dwi_scan', mrtrix_format, 'concatenate_pipeline'),
        DatasetSpec('ficvf', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('odi', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('fiso', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('fibredirs_xvec', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('fibredirs_yvec', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('fibredirs_zvec', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('fmin', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('kappa', nifti_format, 'noddi_fitting_pipeline'),
        DatasetSpec('error_code', nifti_format, 'noddi_fitting_pipeline')
    ]

    add_parameter_specs = [
        ParameterSpec('noddi_model', 'WatsonSHStickTortIsoV_B0')
    ]

    add_switch_specs = [SwitchSpec('single_slice', False)]

    def concatenate_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Concatenates two dMRI datasets (with different b-values) along the
        DW encoding (4th) axis
        """
        pipeline = self.create_pipeline(
            name='concatenation',
            inputs=[
                DatasetSpec('low_b_dw_scan', mrtrix_format),
                DatasetSpec('high_b_dw_scan', mrtrix_format)
            ],
            outputs=[DatasetSpec('dwi_scan', mrtrix_format)],
            desc=("Concatenate low and high b-value dMRI datasets for NODDI "
                  "processing"),
            version=1,
            citations=[mrtrix_cite],
            **kwargs)
        # Create concatenation node
        mrcat = pipeline.create_node(MRCat(),
                                     name='mrcat',
                                     requirements=[mrtrix3_req])
        mrcat.inputs.quiet = True
        # Connect inputs
        pipeline.connect_input('low_b_dw_scan', mrcat, 'first_scan')
        pipeline.connect_input('high_b_dw_scan', mrcat, 'second_scan')
        # Connect outputs
        pipeline.connect_output('dwi_scan', mrcat, 'out_file')
        # Check inputs/outputs are connected
        return pipeline

    def noddi_fitting_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Fits the NODDI model to the dMRI data within a ROI

        Parameters
        ----------
        single_slice: Bool
            If True, the processing is only performed on a single slice
            (for testing)
        noddi_model: Str
            Name of the NODDI model to use for the fitting
        nthreads: Int
            Number of processes to use
        """
        pipeline_name = 'noddi_fitting'
        inputs = [
            DatasetSpec('bias_correct', nifti_gz_format),
            DatasetSpec('grad_dirs', fsl_bvecs_format),
            DatasetSpec('bvalues', fsl_bvals_format)
        ]
        if self.switch('single_slice'):
            inputs.append(DatasetSpec('eroded_mask', nifti_gz_format))
        else:
            inputs.append(DatasetSpec('brain_mask', nifti_gz_format))
        pipeline = self.create_pipeline(
            name=pipeline_name,
            inputs=inputs,
            outputs=[
                DatasetSpec('ficvf', nifti_format),
                DatasetSpec('odi', nifti_format),
                DatasetSpec('fiso', nifti_format),
                DatasetSpec('fibredirs_xvec', nifti_format),
                DatasetSpec('fibredirs_yvec', nifti_format),
                DatasetSpec('fibredirs_zvec', nifti_format),
                DatasetSpec('fmin', nifti_format),
                DatasetSpec('kappa', nifti_format),
                DatasetSpec('error_code', nifti_format)
            ],
            desc=("Creates a ROI in which the NODDI processing will be "
                  "performed"),
            citations=[noddi_cite],
            **kwargs)
        # Create node to unzip the nifti files
        unzip_bias_correct = pipeline.create_node(MRConvert(),
                                                  name="unzip_bias_correct",
                                                  requirements=[mrtrix3_req])
        unzip_bias_correct.inputs.out_ext = 'nii'
        unzip_bias_correct.inputs.quiet = True
        unzip_mask = pipeline.create_node(MRConvert(),
                                          name="unzip_mask",
                                          requirements=[mrtrix3_req])
        unzip_mask.inputs.out_ext = 'nii'
        unzip_mask.inputs.quiet = True
        # Create create-roi node
        create_roi = pipeline.create_node(
            CreateROI(),
            name='create_roi',
            requirements=[noddi_req, matlab2015_req],
            memory=4000)
        pipeline.connect(unzip_bias_correct, 'out_file', create_roi, 'in_file')
        pipeline.connect(unzip_mask, 'out_file', create_roi, 'brain_mask')
        # Create batch-fitting node
        batch_fit = pipeline.create_node(
            BatchNODDIFitting(),
            name="batch_fit",
            requirements=[noddi_req, matlab2015_req],
            wall_time=180,
            memory=8000)
        batch_fit.inputs.model = self.parameter('noddi_model')
        batch_fit.inputs.nthreads = self.runner.num_processes
        pipeline.connect(create_roi, 'out_file', batch_fit, 'roi_file')
        # Create output node
        save_params = pipeline.create_node(
            SaveParamsAsNIfTI(),
            name="save_params",
            requirements=[noddi_req, matlab2015_req],
            memory=4000)
        save_params.inputs.output_prefix = 'params'
        pipeline.connect(batch_fit, 'out_file', save_params, 'params_file')
        pipeline.connect(create_roi, 'out_file', save_params, 'roi_file')
        pipeline.connect(unzip_mask, 'out_file', save_params,
                         'brain_mask_file')
        # Connect inputs
        pipeline.connect_input('bias_correct', unzip_bias_correct, 'in_file')
        if self.switch('single_slice'):
            pipeline.connect_input('eroded_mask', unzip_mask, 'in_file')
        else:
            pipeline.connect_input('brain_mask', unzip_mask, 'in_file')
        pipeline.connect_input('grad_dirs', batch_fit, 'bvecs_file')
        pipeline.connect_input('bvalues', batch_fit, 'bvals_file')
        # Connect outputs
        pipeline.connect_output('ficvf', save_params, 'ficvf')
        pipeline.connect_output('odi', save_params, 'odi')
        pipeline.connect_output('fiso', save_params, 'fiso')
        pipeline.connect_output('fibredirs_xvec', save_params,
                                'fibredirs_xvec')
        pipeline.connect_output('fibredirs_yvec', save_params,
                                'fibredirs_yvec')
        pipeline.connect_output('fibredirs_zvec', save_params,
                                'fibredirs_zvec')
        pipeline.connect_output('fmin', save_params, 'fmin')
        pipeline.connect_output('kappa', save_params, 'kappa')
        pipeline.connect_output('error_code', save_params, 'error_code')
        # Check inputs/outputs are connected
        return pipeline
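
# A minimal usage sketch (assuming an Arcana/NiAnalysis-style Study API;
# the class name 'NODDIStudy' and the constructor arguments below are
# hypothetical):
#
#   study = NODDIStudy(name='noddi', repository=..., inputs=...)
#   pipeline = study.noddi_fitting_pipeline()  # built per current switches
#   # requesting the derived 'ficvf' dataset would then trigger
#   # concatenation, preprocessing and NODDI fitting in order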
Example #14
class DiffusionStudy(EPIStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('dwi_reference', nifti_gz_format, optional=True),
        DatasetSpec('forward_pe', dicom_format, optional=True),
        DatasetSpec('b0',
                    nifti_gz_format,
                    'extract_b0_pipeline',
                    desc="b0 image"),
        DatasetSpec('noise_residual', mrtrix_format, 'preproc_pipeline'),
        DatasetSpec('tensor', nifti_gz_format, 'tensor_pipeline'),
        DatasetSpec('fa', nifti_gz_format, 'tensor_pipeline'),
        DatasetSpec('adc', nifti_gz_format, 'tensor_pipeline'),
        DatasetSpec('wm_response', text_format, 'response_pipeline'),
        DatasetSpec('gm_response', text_format, 'response_pipeline'),
        DatasetSpec('csf_response', text_format, 'response_pipeline'),
        DatasetSpec('avg_response', text_format, 'average_response_pipeline'),
        DatasetSpec('wm_odf', mrtrix_format, 'fod_pipeline'),
        DatasetSpec('gm_odf', mrtrix_format, 'fod_pipeline'),
        DatasetSpec('csf_odf', mrtrix_format, 'fod_pipeline'),
        DatasetSpec('bias_correct', nifti_gz_format, 'bias_correct_pipeline'),
        DatasetSpec('grad_dirs', fsl_bvecs_format, 'preproc_pipeline'),
        DatasetSpec('bvalues', fsl_bvals_format, 'preproc_pipeline'),
        DatasetSpec('eddy_par', eddy_par_format, 'preproc_pipeline'),
        DatasetSpec('align_mats', directory_format,
                    'intrascan_alignment_pipeline'),
        DatasetSpec('tbss_mean_fa',
                    nifti_gz_format,
                    'tbss_pipeline',
                    frequency='per_project'),
        DatasetSpec('tbss_proj_fa',
                    nifti_gz_format,
                    'tbss_pipeline',
                    frequency='per_project'),
        DatasetSpec('tbss_skeleton',
                    nifti_gz_format,
                    'tbss_pipeline',
                    frequency='per_project'),
        DatasetSpec('tbss_skeleton_mask',
                    nifti_gz_format,
                    'tbss_pipeline',
                    frequency='per_project'),
        DatasetSpec('brain', nifti_gz_format, 'brain_extraction_pipeline'),
        DatasetSpec('brain_mask', nifti_gz_format,
                    'brain_extraction_pipeline'),
        DatasetSpec('norm_intensity', mrtrix_format,
                    'intensity_normalisation_pipeline'),
        DatasetSpec('norm_intens_fa_template',
                    mrtrix_format,
                    'intensity_normalisation_pipeline',
                    frequency='per_project'),
        DatasetSpec('norm_intens_wm_mask',
                    mrtrix_format,
                    'intensity_normalisation_pipeline',
                    frequency='per_project')
    ]

    add_parameter_specs = [
        ParameterSpec('multi_tissue', True),
        ParameterSpec('preproc_pe_dir', None, dtype=str),
        ParameterSpec('tbss_skel_thresh', 0.2),
        ParameterSpec('fsl_mask_f', 0.25),
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.2),
        ParameterSpec('bet_reduce_bias', False)
    ]

    add_switch_specs = [
        SwitchSpec('preproc_denoise', False),
        SwitchSpec('response_algorithm', 'tax',
                   ('tax', 'dhollander', 'msmt_5tt')),
        SwitchSpec('fod_algorithm', 'csd', ('csd', 'msmt_csd')),
        SwitchSpec('brain_extract_method', 'mrtrix', ('mrtrix', 'fsl')),
        SwitchSpec('bias_correct_method', 'ants', choices=('ants', 'fsl'))
    ]

    @property
    def multi_tissue(self):
        return self.branch('response_algorithm', ('msmt_5tt', 'dhollander'))
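
    # Note: `branch` appears to return True when the named switch is set to
    # one of the given values, so the property above is roughly equivalent
    # to (a sketch of the assumed semantics):
    #   self.switch('response_algorithm') in ('msmt_5tt', 'dhollander')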

    def preproc_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Performs a series of FSL preprocessing steps, including Eddy and Topup

        Parameters
        ----------
        preproc_pe_dir : str ('AP'|'LR'|'IS')
            The phase encode direction
        """

        outputs = [
            DatasetSpec('preproc', nifti_gz_format),
            DatasetSpec('grad_dirs', fsl_bvecs_format),
            DatasetSpec('bvalues', fsl_bvals_format),
            DatasetSpec('eddy_par', eddy_par_format)
        ]
        citations = [fsl_cite, eddy_cite, topup_cite, distort_correct_cite]
        if self.switch('preproc_denoise'):
            outputs.append(DatasetSpec('noise_residual', mrtrix_format))
            citations.extend(dwidenoise_cites)

        if ('dwi_reference' in self.input_names or
                'reverse_phase' in self.input_names):
            inputs = [
                DatasetSpec('primary', dicom_format),
                FieldSpec('ped', dtype=str),
                FieldSpec('pe_angle', dtype=str)
            ]
            if 'dwi_reference' in self.input_names:
                inputs.append(DatasetSpec('dwi_reference', nifti_gz_format))
            if 'reverse_phase' in self.input_names:
                inputs.append(DatasetSpec('reverse_phase', nifti_gz_format))
            distortion_correction = True
        else:
            inputs = [DatasetSpec('primary', dicom_format)]
            distortion_correction = False

        pipeline = self.create_pipeline(
            name='preprocess',
            inputs=inputs,
            outputs=outputs,
            desc=("Preprocess dMRI studies using distortion correction"),
            version=1,
            citations=citations,
            **kwargs)
        # Denoise the dwi-scan
        if self.switch('preproc_denoise'):
            # Run denoising
            denoise = pipeline.create_node(DWIDenoise(),
                                           name='denoise',
                                           requirements=[mrtrix3_req])
            denoise.inputs.out_file_ext = '.mif'
            # Calculate residual noise
            subtract_operands = pipeline.create_node(Merge(2),
                                                     name='subtract_operands')
            subtract = pipeline.create_node(MRCalc(),
                                            name='subtract',
                                            requirements=[mrtrix3_req])
            subtract.inputs.out_ext = '.mif'
            subtract.inputs.operation = 'subtract'
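            # Together these compute the residual as (original - denoised),
            # roughly the MRtrix3 command (a sketch; paths hypothetical):
            #   mrcalc dwi.mif denoised.mif -subtract noise_residual.mif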
        dwipreproc = pipeline.create_node(
            DWIPreproc(),
            name='dwipreproc',
            requirements=[mrtrix3_req, fsl509_req],
            wall_time=60)
        dwipreproc.inputs.eddy_parameters = '--data_is_shelled '
        dwipreproc.inputs.no_clean_up = True
        dwipreproc.inputs.out_file_ext = '.nii.gz'
        dwipreproc.inputs.temp_dir = 'dwipreproc_tempdir'
        # Create node to reorient preproc out_file
        swap = pipeline.create_node(fsl.utils.Reorient2Std(),
                                    name='fslreorient2std',
                                    requirements=[fsl509_req])
        if distortion_correction:
            # Extract b=0 volumes
            dwiextract = pipeline.create_node(ExtractDWIorB0(),
                                              name='dwiextract',
                                              requirements=[mrtrix3_req])
            dwiextract.inputs.bzero = True
            dwiextract.inputs.out_ext = '.nii.gz'
            # Get first b=0 from dwi b=0 volumes
            mrconvert = pipeline.create_node(MRConvert(),
                                             name="mrconvert",
                                             requirements=[mrtrix3_req])
            mrconvert.inputs.coord = (3, 0)
            # Concatenate extracted forward rpe with reverse rpe
            mrcat = pipeline.create_node(MRCat(),
                                         name='mrcat',
                                         requirements=[mrtrix3_req])
            # Create node to assign the right PED to the diffusion
            prep_dwi = pipeline.create_node(PrepareDWI(), name='prepare_dwi')
            # Create preprocessing node
            dwipreproc.inputs.rpe_pair = True
            if self.parameter('preproc_pe_dir') is not None:
                dwipreproc.inputs.pe_dir = self.parameter('preproc_pe_dir')
            # Create node to extract the gradients in FSL format
            extract_grad = pipeline.create_node(ExtractFSLGradients(),
                                                name="extract_grad",
                                                requirements=[mrtrix3_req])
            # Connect inputs
            if 'dwi_reference' in self.input_names:
                pipeline.connect_input('dwi_reference', mrcat, 'second_scan')
            elif 'reverse_phase' in self.input_names:
                pipeline.connect_input('reverse_phase', mrcat, 'second_scan')
            else:
                assert False, ("either 'dwi_reference' or 'reverse_phase' "
                               "must be provided")
            pipeline.connect_input('primary', dwiextract, 'in_file')
        # Connect inter-nodes
        if self.switch('preproc_denoise'):
            pipeline.connect_input('primary', denoise, 'in_file')
            pipeline.connect_input('primary', subtract_operands, 'in1')
            pipeline.connect(denoise, 'out_file', dwipreproc, 'in_file')
            pipeline.connect(denoise, 'noise', subtract_operands, 'in2')
            pipeline.connect(subtract_operands, 'out', subtract, 'operands')
        else:
            pipeline.connect_input('primary', dwipreproc, 'in_file')
        if distortion_correction:
            pipeline.connect_input('ped', prep_dwi, 'pe_dir')
            pipeline.connect_input('pe_angle', prep_dwi, 'ped_polarity')
            pipeline.connect(prep_dwi, 'pe', dwipreproc, 'pe_dir')
            pipeline.connect(mrcat, 'out_file', dwipreproc, 'se_epi')
            pipeline.connect(dwiextract, 'out_file', mrconvert, 'in_file')
            pipeline.connect(mrconvert, 'out_file', mrcat, 'first_scan')
        pipeline.connect_input('primary', extract_grad, 'in_file')
        pipeline.connect(dwipreproc, 'out_file', swap, 'in_file')
        # Connect outputs
        pipeline.connect_output('preproc', swap, 'out_file')
        pipeline.connect_output('grad_dirs', extract_grad, 'bvecs_file')
        pipeline.connect_output('bvalues', extract_grad, 'bvals_file')
        pipeline.connect_output('eddy_par', dwipreproc, 'eddy_parameters')
        if self.switch('preproc_denoise'):
            pipeline.connect_output('noise_residual', subtract, 'out_file')
        # Check inputs/outputs are connected
        return pipeline

    def brain_extraction_pipeline(self,
                                  **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Generates a whole brain mask using MRtrix's 'dwi2mask' command

        Parameters
        ----------
        mask_tool: Str
            Can be either 'bet' or 'dwi2mask' depending on which mask tool you
            want to use
        """
        if self.branch('brain_extract_method', 'mrtrix'):
            pipeline = self.create_pipeline(
                'brain_extraction',
                inputs=[
                    DatasetSpec('preproc', nifti_gz_format),
                    DatasetSpec('grad_dirs', fsl_bvecs_format),
                    DatasetSpec('bvalues', fsl_bvals_format)
                ],
                outputs=[DatasetSpec('brain_mask', nifti_gz_format)],
                desc="Generate brain mask from b0 images",
                version=1,
                citations=[mrtrix_cite],
                **kwargs)
            # Create mask node
            dwi2mask = pipeline.create_node(BrainMask(),
                                            name='dwi2mask',
                                            requirements=[mrtrix3_req])
            dwi2mask.inputs.out_file = 'brain_mask.nii.gz'
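            # Corresponds roughly to the MRtrix3 command (a sketch; paths
            # hypothetical):
            #   dwi2mask -fslgrad grad_dirs.bvec bvalues.bval \
            #       preproc.nii.gz brain_mask.nii.gz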
            # Gradient merge node
            grad_fsl = pipeline.create_node(MergeTuple(2), name="grad_fsl")
            # Connect nodes
            pipeline.connect(grad_fsl, 'out', dwi2mask, 'grad_fsl')
            # Connect inputs
            pipeline.connect_input('grad_dirs', grad_fsl, 'in1')
            pipeline.connect_input('bvalues', grad_fsl, 'in2')
            pipeline.connect_input('preproc', dwi2mask, 'in_file')
            # Connect outputs
            pipeline.connect_output('brain_mask', dwi2mask, 'out_file')
            # Check inputs/outputs are connected
            pipeline.assert_connected()
        else:
            pipeline = super(DiffusionStudy,
                             self).brain_extraction_pipeline(**kwargs)
        return pipeline

    def bias_correct_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Corrects B1 field inhomogeneities
        """
        bias_method = self.switch('bias_correct_method')
        pipeline = self.create_pipeline(
            name='bias_correct',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format)
            ],
            outputs=[DatasetSpec('bias_correct', nifti_gz_format)],
            desc="Corrects for B1 field inhomogeneity",
            version=1,
            citations=[
                fast_cite, (n4_cite if bias_method == 'ants' else fsl_cite)
            ],
            **kwargs)
        # Create bias correct node
        bias_correct = pipeline.create_node(
            DWIBiasCorrect(),
            name="bias_correct",
            requirements=(
                [mrtrix3_req] +
                [ants2_req if bias_method == 'ants' else fsl509_req]))
        bias_correct.inputs.method = bias_method
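        # Corresponds roughly to the (pre-3.0) MRtrix3 script invocation
        # (a sketch; paths hypothetical):
        #   dwibiascorrect -ants -mask brain_mask.nii.gz \
        #       -fslgrad grad_dirs.bvec bvalues.bval \
        #       preproc.nii.gz bias_correct.nii.gz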
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', bias_correct, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('preproc', bias_correct, 'in_file')
        pipeline.connect_input('brain_mask', bias_correct, 'mask')
        # Connect to outputs
        pipeline.connect_output('bias_correct', bias_correct, 'out_file')
        # Check inputs/output are connected
        return pipeline

    def intensity_normalisation_pipeline(self, **kwargs):
        pipeline = self.create_pipeline(
            name='intensity_normalization',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format)
            ],
            outputs=[
                DatasetSpec('norm_intensity', mrtrix_format),
                DatasetSpec('norm_intens_fa_template',
                            mrtrix_format,
                            frequency='per_project'),
                DatasetSpec('norm_intens_wm_mask',
                            mrtrix_format,
                            frequency='per_project')
            ],
            desc="Corrects for B1 field inhomogeneity",
            version=1,
            citations=[mrtrix3_req],
            **kwargs)
        # Convert from nifti to mrtrix format
        grad_merge = pipeline.create_node(MergeTuple(2), name="grad_merge")
        mrconvert = pipeline.create_node(MRConvert(), name='mrconvert')
        mrconvert.inputs.out_ext = '.mif'
        # Set up join nodes
        fields = ['dwis', 'masks', 'subject_ids', 'visit_ids']
        join_subjects = pipeline.create_join_subjects_node(
            IdentityInterface(fields), joinfield=fields, name='join_subjects')
        join_visits = pipeline.create_join_visits_node(Chain(fields),
                                                       joinfield=fields,
                                                       name='join_visits')
        # Set up select node to pick out this session's normalised image
        select = pipeline.create_node(SelectSession(), name='select_session')
        # Intensity normalization
        intensity_norm = pipeline.create_node(DWIIntensityNorm(),
                                              name='dwiintensitynorm')
        # Connect inputs
        pipeline.connect_input('bias_correct', mrconvert, 'in_file')
        pipeline.connect_input('grad_dirs', grad_merge, 'in1')
        pipeline.connect_input('bvalues', grad_merge, 'in2')
        pipeline.connect_subject_id(join_subjects, 'subject_ids')
        pipeline.connect_visit_id(join_subjects, 'visit_ids')
        pipeline.connect_subject_id(select, 'subject_id')
        pipeline.connect_visit_id(select, 'visit_id')
        pipeline.connect_input('brain_mask', join_subjects, 'masks')
        # Internal connections
        pipeline.connect(grad_merge, 'out', mrconvert, 'grad_fsl')
        pipeline.connect(mrconvert, 'out_file', join_subjects, 'dwis')
        pipeline.connect(join_subjects, 'dwis', join_visits, 'dwis')
        pipeline.connect(join_subjects, 'masks', join_visits, 'masks')
        pipeline.connect(join_subjects, 'subject_ids', join_visits,
                         'subject_ids')
        pipeline.connect(join_subjects, 'visit_ids', join_visits, 'visit_ids')
        pipeline.connect(join_visits, 'dwis', intensity_norm, 'in_files')
        pipeline.connect(join_visits, 'masks', intensity_norm, 'masks')
        pipeline.connect(join_visits, 'subject_ids', select, 'subject_ids')
        pipeline.connect(join_visits, 'visit_ids', select, 'visit_ids')
        pipeline.connect(intensity_norm, 'out_files', select, 'items')
        # Connect outputs
        pipeline.connect_output('norm_intensity', select, 'item')
        pipeline.connect_output('norm_intens_fa_template', intensity_norm,
                                'fa_template')
        pipeline.connect_output('norm_intens_wm_mask', intensity_norm,
                                'wm_mask')
        return pipeline

    def tensor_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Fits the apparent diffusion tensor (DT) to each voxel of the image
        """
        pipeline = self.create_pipeline(
            name='tensor',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[DatasetSpec('tensor', nifti_gz_format)],
            desc=("Estimates the apparent diffusion tensor in each "
                  "voxel"),
            version=1,
            citations=[],
            **kwargs)
        # Create tensor fit node
        dwi2tensor = pipeline.create_node(FitTensor(), name='dwi2tensor')
        dwi2tensor.inputs.out_file = 'dti.nii.gz'
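        # Corresponds roughly to the MRtrix3 command (a sketch):
        #   dwi2tensor -mask brain_mask.nii.gz \
        #       -fslgrad grad_dirs.bvec bvalues.bval \
        #       bias_correct.nii.gz dti.nii.gz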
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', dwi2tensor, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('bias_correct', dwi2tensor, 'in_file')
        pipeline.connect_input('brain_mask', dwi2tensor, 'in_mask')
        # Connect to outputs
        pipeline.connect_output('tensor', dwi2tensor, 'out_file')
        # Check inputs/output are connected
        return pipeline

    def fa_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Calculates the FA and ADC maps from the diffusion tensor
        """
        pipeline = self.create_pipeline(
            name='fa',
            inputs=[
                DatasetSpec('tensor', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('fa', nifti_gz_format),
                DatasetSpec('adc', nifti_gz_format)
            ],
            desc=("Calculates the FA and ADC from a tensor image"),
            version=1,
            citations=[],
            **kwargs)
        # Create tensor fit node
        metrics = pipeline.create_node(TensorMetrics(),
                                       name='metrics',
                                       requirements=[mrtrix3_req])
        metrics.inputs.out_fa = 'fa.nii.gz'
        metrics.inputs.out_adc = 'adc.nii.gz'
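        # Corresponds roughly to the MRtrix3 command (a sketch):
        #   tensor2metric -mask brain_mask.nii.gz \
        #       -fa fa.nii.gz -adc adc.nii.gz dti.nii.gz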
        # Connect to inputs
        pipeline.connect_input('tensor', metrics, 'in_file')
        pipeline.connect_input('brain_mask', metrics, 'in_mask')
        # Connect to outputs
        pipeline.connect_output('fa', metrics, 'out_fa')
        pipeline.connect_output('adc', metrics, 'out_adc')
        # Check inputs/output are connected
        return pipeline

    def response_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Estimates the fibre response function used for constrained
        spherical deconvolution

        Parameters
        ----------
        response_algorithm : str
            Algorithm used to estimate the response
        """
        outputs = [DatasetSpec('wm_response', text_format)]
        if self.multi_tissue:
            outputs.append(DatasetSpec('gm_response', text_format))
            outputs.append(DatasetSpec('csf_response', text_format))
        pipeline = self.create_pipeline(
            name='response',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=outputs,
            desc=("Estimates the fibre response function"),
            version=1,
            citations=[mrtrix_cite],
            **kwargs)
        # Create fod fit node
        response = pipeline.create_node(ResponseSD(),
                                        name='response',
                                        requirements=[mrtrix3_req])
        response.inputs.algorithm = self.switch('response_algorithm')
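        # Corresponds roughly to the MRtrix3 command (a sketch):
        #   dwi2response tax dwi.mif wm_response.txt
        # or, for the multi-tissue algorithms:
        #   dwi2response dhollander dwi.mif wm.txt gm.txt csf.txt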
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', response, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('bias_correct', response, 'in_file')
        pipeline.connect_input('brain_mask', response, 'in_mask')
        # Connect to outputs
        pipeline.connect_output('wm_response', response, 'wm_file')
        if self.multi_tissue:
            response.inputs.gm_file = 'gm.txt'
            response.inputs.csf_file = 'csf.txt'
            pipeline.connect_output('gm_response', response, 'gm_file')
            pipeline.connect_output('csf_response', response, 'csf_file')
        # Check inputs/output are connected
        return pipeline

    def average_response_pipeline(self, **kwargs):
        """
        Averages the estimate response function over all subjects in the
        project
        """
        pipeline = self.create_pipeline(
            name='average_response',
            inputs=[DatasetSpec('wm_response', text_format)],
            outputs=[
                DatasetSpec('avg_response',
                            text_format,
                            frequency='per_project')
            ],
            desc=("Averages the fibre response function over the project"),
            version=1,
            citations=[mrtrix_cite],
            **kwargs)
        join_subjects = pipeline.create_join_subjects_node(
            IdentityInterface(['responses']),
            name='join_subjects',
            joinfield=['responses'])
        join_visits = pipeline.create_join_visits_node(Chain(['responses']),
                                                       name='join_visits',
                                                       joinfield=['responses'])
        avg_response = pipeline.create_node(AverageResponse(),
                                            name='avg_response')
        # Connect inputs
        pipeline.connect_input('wm_response', join_subjects, 'responses')
        # Connect inter-nodes
        pipeline.connect(join_subjects, 'responses', join_visits, 'responses')
        pipeline.connect(join_visits, 'responses', avg_response, 'in_files')
        # Connect outputs
        pipeline.connect_output('avg_response', avg_response, 'out_file')
        # Check inputs/output are connected
        return pipeline


    def fod_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Estimates the fibre orientation distribution (FOD) using constrained
        spherical deconvolution
        """
        inputs = [
            DatasetSpec('bias_correct', nifti_gz_format),
            DatasetSpec('grad_dirs', fsl_bvecs_format),
            DatasetSpec('bvalues', fsl_bvals_format),
            DatasetSpec('wm_response', text_format),
            DatasetSpec('brain_mask', nifti_gz_format)
        ]
        outputs = [DatasetSpec('wm_odf', mrtrix_format)]
        if self.multi_tissue:
            inputs.append(DatasetSpec('gm_response', text_format))
            inputs.append(DatasetSpec('csf_response', text_format))
            outputs.append(DatasetSpec('gm_odf', mrtrix_format))
            outputs.append(DatasetSpec('csf_odf', mrtrix_format))
            algorithm = 'msmt_csd'
        else:
            algorithm = 'csd'
        pipeline = self.create_pipeline(
            name='fod',
            inputs=inputs,
            outputs=outputs,
            desc=("Estimates the fibre orientation distribution in each"
                  " voxel"),
            version=1,
            citations=[mrtrix_cite],
            **kwargs)

        # Create fod fit node
        dwi2fod = pipeline.create_node(EstimateFOD(),
                                       name='dwi2fod',
                                       requirements=[mrtrix3_req])
        dwi2fod.inputs.algorithm = algorithm
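        # Corresponds roughly to the MRtrix3 command (a sketch):
        #   dwi2fod csd dwi.mif wm_response.txt wm_odf.mif -mask mask.mif
        # or, for multi-tissue:
        #   dwi2fod msmt_csd dwi.mif wm.txt wm.mif gm.txt gm.mif \
        #       csf.txt csf.mif -mask mask.mif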
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', dwi2fod, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('bias_correct', dwi2fod, 'in_file')
        pipeline.connect_input('wm_response', dwi2fod, 'wm_txt')
        pipeline.connect_input('brain_mask', dwi2fod, 'mask_file')
        # Connect to outputs
        pipeline.connect_output('wm_odf', dwi2fod, 'wm_odf')
        # If multi-tissue
        if self.multi_tissue:
            pipeline.connect_input('gm_response', dwi2fod, 'gm_txt')
            pipeline.connect_input('csf_response', dwi2fod, 'csf_txt')
            dwi2fod.inputs.gm_odf = 'gm.mif'
            dwi2fod.inputs.csf_odf = 'csf.mif'
            pipeline.connect_output('gm_odf', dwi2fod, 'gm_odf')
            pipeline.connect_output('csf_odf', dwi2fod, 'csf_odf')
        # Check inputs/output are connected
        return pipeline

    def tbss_pipeline(self, **kwargs):  # @UnusedVariable
        pipeline = self.create_pipeline(
            name='tbss',
            inputs=[DatasetSpec('fa', nifti_gz_format)],
            outputs=[
                DatasetSpec('tbss_mean_fa',
                            nifti_gz_format,
                            frequency='per_project'),
                DatasetSpec('tbss_proj_fa',
                            nifti_gz_format,
                            frequency='per_project'),
                DatasetSpec('tbss_skeleton',
                            nifti_gz_format,
                            frequency='per_project'),
                DatasetSpec('tbss_skeleton_mask',
                            nifti_gz_format,
                            frequency='per_project')
            ],
            desc="Performs a TBSS analysis on the FA maps across the project",
            version=1,
            citations=[tbss_cite, fsl_cite],
            **kwargs)
        # Create TBSS workflow
        tbss = create_tbss_all(name='tbss')
        # Connect inputs
        pipeline.connect_input('fa', tbss, 'inputnode.fa_list')
        # Connect outputs
        pipeline.connect_output('tbss_mean_fa', tbss, 'outputnode.meanfa_file')
        pipeline.connect_output('tbss_proj_fa', tbss,
                                'outputnode.projectedfa_file')
        pipeline.connect_output('tbss_skeleton', tbss,
                                'outputnode.skeleton_file')
        pipeline.connect_output('tbss_skeleton_mask', tbss,
                                'outputnode.skeleton_mask')
        # Check inputs/output are connected
        return pipeline

    def extract_b0_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Extracts the b0 images from a DWI study and takes their mean
        """
        pipeline = self.create_pipeline(
            name='extract_b0',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format)
            ],
            outputs=[DatasetSpec('b0', nifti_gz_format)],
            desc="Extract b0 image from a DWI study",
            version=1,
            citations=[mrtrix_cite],
            **kwargs)
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Extraction node
        extract_b0s = pipeline.create_node(ExtractDWIorB0(),
                                           name='extract_b0s',
                                           requirements=[mrtrix3_req])
        extract_b0s.inputs.bzero = True
        extract_b0s.inputs.quiet = True
        # FIXME: Need a registration step before the mean
        # Mean calculation node
        mean = pipeline.create_node(MRMath(),
                                    name="mean",
                                    requirements=[mrtrix3_req])
        mean.inputs.axis = 3
        mean.inputs.operation = 'mean'
        mean.inputs.quiet = True
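        # Together the extract/mean nodes correspond roughly to (a sketch):
        #   dwiextract -bzero dwi.mif - | mrmath - mean mean_b0.mif -axis 3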
        # Convert to Nifti
        mrconvert = pipeline.create_node(MRConvert(),
                                         name="output_conversion",
                                         requirements=[mrtrix3_req])
        mrconvert.inputs.out_ext = '.nii.gz'
        mrconvert.inputs.quiet = True
        # Connect inputs
        pipeline.connect_input('bias_correct', extract_b0s, 'in_file')
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        # Connect between nodes
        pipeline.connect(extract_b0s, 'out_file', mean, 'in_files')
        pipeline.connect(fsl_grads, 'out', extract_b0s, 'grad_fsl')
        pipeline.connect(mean, 'out_file', mrconvert, 'in_file')
        # Connect outputs
        pipeline.connect_output('b0', mrconvert, 'out_file')
        pipeline.assert_connected()
        # Check inputs/outputs are connected
        return pipeline

    def track_gen_pipeline(self, **kwargs):
        # FIXME: currently a stub that duplicates the extract_b0 pipeline
        # boilerplate; tractography nodes still need to be added
        pipeline = self.create_pipeline(
            name='track_gen',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format)
            ],
            outputs=[DatasetSpec('b0', nifti_gz_format)],
            desc="Generate whole-brain tracks (not yet implemented)",
            version=1,
            citations=[mrtrix_cite],
            **kwargs)
        return pipeline

    def intrascan_alignment_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='affine_mat_generation',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('eddy_par', eddy_par_format)
            ],
            outputs=[DatasetSpec('align_mats', directory_format)],
            desc=("Generation of the affine matrices for the main dwi "
                  "sequence starting from eddy motion parameters"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        aff_mat = pipeline.create_node(AffineMatrixGeneration(),
                                       name='gen_aff_mats')
        pipeline.connect_input('preproc', aff_mat, 'reference_image')
        pipeline.connect_input('eddy_par', aff_mat, 'motion_parameters')
        pipeline.connect_output('align_mats', aff_mat, 'affine_matrices')
        return pipeline
Example #15
class UTEStudy(MRIStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('umap', dicom_format),
        DatasetSpec('umap_nifti', nifti_gz_format,
                    'umap_dcm2nii_conversion_pipeline'),
        DatasetSpec('brain', nifti_gz_format, 'brain_extraction_pipeline'),
        DatasetSpec('ute_echo1', dicom_format),
        DatasetSpec('ute_echo2', dicom_format),
        DatasetSpec('umap_ute', dicom_format),
        DatasetSpec('ute1_registered', nifti_gz_format,
                    'registration_pipeline'),
        DatasetSpec('ute2_registered', nifti_gz_format,
                    'registration_pipeline'),
        DatasetSpec('template_to_ute_mat', text_matrix_format,
                    'registration_pipeline'),
        DatasetSpec('ute_to_template_mat', text_matrix_format,
                    'registration_pipeline'),
        DatasetSpec('air_mask', nifti_gz_format, 'segmentation_pipeline'),
        DatasetSpec('bones_mask', nifti_gz_format, 'segmentation_pipeline'),
        DatasetSpec('sute_cont_template', nifti_gz_format,
                    'umaps_calculation_pipeline'),
        DatasetSpec('sute_fix_template', nifti_gz_format,
                    'umaps_calculation_pipeline'),
        DatasetSpec('sute_fix_ute', nifti_gz_format,
                    'backwrap_to_ute_pipeline'),
        DatasetSpec('sute_cont_ute', nifti_gz_format,
                    'backwrap_to_ute_pipeline')
    ]

    add_switch_specs = [
        SwitchSpec('bet_method', 'optibet',
                   choices=MRIStudy.switch_spec('bet_method').choices)
    ]

    template_path = '/home/jakubb/template/template_template0.nii.gz'
    tpm_path = '/environment/packages/spm/12/tpm/head_tpm.nii'

    def header_info_extraction_pipeline(self, **kwargs):
        return (super(UTEStudy, self).header_info_extraction_pipeline_factory(
            'primary', **kwargs))

    def umap_dcm2nii_conversion_pipeline(self, **kwargs):
        return super(UTEStudy, self).dcm2nii_conversion_pipeline_factory(
            'umap_dcm2nii', 'umap', **kwargs)

    def registration_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Register T1 and T2 to the

        Parameters
        ----------
        """
        pipeline = self.create_pipeline(
            name='registration_pipeline',
            inputs=[
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('ute_echo2', dicom_format)
            ],
            outputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute2_registered', nifti_gz_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('ute_to_template_mat', text_matrix_format)
            ],
            desc="Register ute images to the template",
            version=1,
            citations=[fsl_cite],
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'

        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        echo2_conv = pipeline.create_node(MRConvert(), name='echo2_conv')
        echo2_conv.inputs.out_ext = '.nii.gz'

        pipeline.connect_input('ute_echo2', echo2_conv, 'in_file')

        # Create registration node
        registration = pipeline.create_node(FLIRT(),
                                            name='ute1_registration',
                                            requirements=[fsl5_req],
                                            wall_time=180)

        pipeline.connect(echo1_conv, 'out_file', registration, 'in_file')

        registration.inputs.reference = self.template_path
        registration.inputs.output_type = 'NIFTI_GZ'
        registration.inputs.searchr_x = [-180, 180]
        registration.inputs.searchr_y = [-180, 180]
        registration.inputs.searchr_z = [-180, 180]
        registration.inputs.bins = 256
        registration.inputs.cost_func = 'corratio'
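        # Corresponds roughly to the FSL command (a sketch; paths
        # hypothetical):
        #   flirt -in ute_echo1.nii.gz -ref template.nii.gz \
        #       -out ute1_registered.nii.gz -omat ute_to_template.mat \
        #       -bins 256 -cost corratio -searchrx -180 180 \
        #       -searchry -180 180 -searchrz -180 180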

        # Inverse matrix conversion
        convert_mat = pipeline.create_node(ConvertXFM(),
                                           name='inverse_matrix_conversion',
                                           requirements=[fsl5_req],
                                           wall_time=10)
        pipeline.connect(registration, 'out_matrix_file', convert_mat,
                         'in_file')
        convert_mat.inputs.invert_xfm = True
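        # i.e. roughly (a sketch):
        #   convert_xfm -omat template_to_ute.mat -inverse ute_to_template.mat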

        # UTE_echo_2 transformation
        transform_ute2 = pipeline.create_node(ApplyXFM(),
                                              name='transform_t2',
                                              requirements=[fsl5_req],
                                              wall_time=10)
        pipeline.connect(registration, 'out_matrix_file', transform_ute2,
                         'in_matrix_file')
        pipeline.connect(echo2_conv, 'out_file', transform_ute2, 'in_file')

        transform_ute2.inputs.output_type = 'NIFTI_GZ'
        transform_ute2.inputs.reference = self.template_path
        transform_ute2.inputs.apply_xfm = True

        # Connect outputs
        pipeline.connect_output('ute1_registered', registration, 'out_file')
        pipeline.connect_output('ute_to_template_mat', registration,
                                'out_matrix_file')
        pipeline.connect_output('ute2_registered', transform_ute2, 'out_file')
        pipeline.connect_output('template_to_ute_mat', convert_mat, 'out_file')
        pipeline.assert_connected()

        return pipeline

    def segmentation_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8

        pipeline = self.create_pipeline(
            name='ute1_segmentation',
            inputs=[DatasetSpec('ute1_registered', nifti_gz_format)],
            outputs=[
                DatasetSpec('air_mask', nifti_gz_format),
                DatasetSpec('bones_mask', nifti_gz_format)
            ],
            desc="Segmentation of the first echo UTE image",
            version=1,
            citations=[spm_cite, matlab_cite],
            **kwargs)

        segmentation = pipeline.create_node(
            NewSegment(),
            name='ute1_registered_segmentation',
            requirements=[matlab2015_req, spm12_req],
            wall_time=480)
        pipeline.connect_input('ute1_registered', segmentation,
                               'channel_files')
        segmentation.inputs.affine_regularization = 'none'
        tissue1 = ((self.tpm_path, 1), 1, (True, False), (False, False))
        tissue2 = ((self.tpm_path, 2), 1, (True, False), (False, False))
        tissue3 = ((self.tpm_path, 3), 2, (True, False), (False, False))
        tissue4 = ((self.tpm_path, 4), 3, (True, False), (False, False))
        tissue5 = ((self.tpm_path, 5), 4, (True, False), (False, False))
        tissue6 = ((self.tpm_path, 6), 3, (True, False), (False, False))
        segmentation.inputs.tissues = [
            tissue1, tissue2, tissue3, tissue4, tissue5, tissue6
        ]

        select_bones_pm = pipeline.create_node(
            Select(),
            name='select_bones_pm_from_SPM_new_segmentation',
            requirements=[],
            wall_time=5)
        pipeline.connect(segmentation, 'native_class_images', select_bones_pm,
                         'inlist')
        select_bones_pm.inputs.index = 3

        select_air_pm = pipeline.create_node(
            Select(),
            name='select_air_pm_from_SPM_new_segmentation',
            requirements=[],
            wall_time=5)

        pipeline.connect(segmentation, 'native_class_images', select_air_pm,
                         'inlist')
        select_air_pm.inputs.index = 5

        threshold_bones = pipeline.create_node(
            Threshold(),
            name='bones_probabilistic_map_thresholding',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(select_bones_pm, 'out', threshold_bones, 'in_file')
        threshold_bones.inputs.output_type = "NIFTI_GZ"
        threshold_bones.inputs.direction = 'below'
        threshold_bones.inputs.thresh = 0.2

        binarize_bones = pipeline.create_node(
            UnaryMaths(),
            name='bones_probabilistic_map_binarization',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(threshold_bones, 'out_file', binarize_bones,
                         'in_file')
        binarize_bones.inputs.output_type = "NIFTI_GZ"
        binarize_bones.inputs.operation = 'bin'
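        # Threshold + binarise correspond roughly to the FSL command
        # (a sketch):
        #   fslmaths bones_pm.nii.gz -thr 0.2 -bin bones_mask.nii.gz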

        threshold_air = pipeline.create_node(
            Threshold(),
            name='air_probabilistic_maps_thresholding',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(select_air_pm, 'out', threshold_air, 'in_file')
        threshold_air.inputs.output_type = "NIFTI_GZ"
        threshold_air.inputs.direction = 'below'
        threshold_air.inputs.thresh = 0.1

        binarize_air = pipeline.create_node(
            UnaryMaths(),
            name='air_probabilistic_map_binarization',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(threshold_air, 'out_file', binarize_air, 'in_file')
        binarize_air.inputs.output_type = "NIFTI_GZ"
        binarize_air.inputs.operation = 'bin'

        pipeline.connect_output('bones_mask', binarize_bones, 'out_file')
        pipeline.connect_output('air_mask', binarize_air, 'out_file')
        pipeline.assert_connected()

        return pipeline

    def umaps_calculation_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='core_umaps_calculation',
            inputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute2_registered', nifti_gz_format),
                DatasetSpec('air_mask', nifti_gz_format),
                DatasetSpec('bones_mask', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('sute_cont_template', nifti_gz_format),
                DatasetSpec('sute_fix_template', nifti_gz_format)
            ],
            desc="Umaps calculation in the template space",
            version=1,
            citations=[matlab_cite],
            **kwargs)

        umaps_calculation = pipeline.create_node(
            CoreUmapCalc(),
            name='umaps_calculation_based_on_masks_and_r2star',
            requirements=[matlab2015_req],
            wall_time=20)
        pipeline.connect_input('ute1_registered', umaps_calculation,
                               'ute1_reg')
        pipeline.connect_input('ute2_registered', umaps_calculation,
                               'ute2_reg')
        pipeline.connect_input('air_mask', umaps_calculation, 'air__mask')
        pipeline.connect_input('bones_mask', umaps_calculation, 'bones__mask')

        pipeline.connect_output('sute_cont_template', umaps_calculation,
                                'sute_cont_template')
        pipeline.connect_output('sute_fix_template', umaps_calculation,
                                'sute_fix_template')
        pipeline.assert_connected()

        return pipeline

    def backwrap_to_ute_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='backwrap_to_ute',
            inputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('umap_ute', dicom_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('sute_cont_template', nifti_gz_format),
                DatasetSpec('sute_fix_template', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('sute_cont_ute', nifti_gz_format),
                DatasetSpec('sute_fix_ute', nifti_gz_format)
            ],
            desc="Moving umaps back to the UTE space",
            version=1,
            citations=[matlab_cite],
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        umap_conv = pipeline.create_node(MRConvert(), name='umap_conv')
        umap_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('umap_ute', umap_conv, 'in_file')

        zero_template_mask = pipeline.create_node(BinaryMaths(),
                                                  name='zero_template_mask',
                                                  requirements=[fsl5_req],
                                                  wall_time=3)
        pipeline.connect_input('ute1_registered', zero_template_mask,
                               'in_file')
        zero_template_mask.inputs.operation = "mul"
        zero_template_mask.inputs.operand_value = 0
        zero_template_mask.inputs.output_type = 'NIFTI_GZ'

        region_template_mask = pipeline.create_node(
            FLIRT(),
            name='region_template_mask',
            requirements=[fsl5_req],
            wall_time=5)
        region_template_mask.inputs.apply_xfm = True
        region_template_mask.inputs.bgvalue = 1
        region_template_mask.inputs.interp = 'nearestneighbour'
        region_template_mask.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(zero_template_mask, 'out_file', region_template_mask,
                         'in_file')
        pipeline.connect(echo1_conv, 'out_file', region_template_mask,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', region_template_mask,
                               'in_matrix_file')

        fill_in_umap = pipeline.create_node(MultiImageMaths(),
                                            name='fill_in_umap',
                                            requirements=[fsl5_req],
                                            wall_time=3)
        fill_in_umap.inputs.op_string = "-mul %s "
        fill_in_umap.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(region_template_mask, 'out_file', fill_in_umap,
                         'in_file')
        pipeline.connect(umap_conv, 'out_file', fill_in_umap, 'operand_files')
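        # i.e. roughly (a sketch):
        #   fslmaths region_mask.nii.gz -mul umap_ute.nii.gz filled_umap.nii.gz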

        sute_fix_ute_space = pipeline.create_node(FLIRT(),
                                                  name='sute_fix_ute_space',
                                                  requirements=[fsl5_req],
                                                  wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_fix_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_fix_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_fix_template', sute_fix_ute_space,
                               'in_file')
        sute_fix_ute_space.inputs.apply_xfm = True
        sute_fix_ute_space.inputs.bgvalue = 0
        sute_fix_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_cont_ute_space = pipeline.create_node(FLIRT(),
                                                   name='sute_cont_ute_space',
                                                   requirements=[fsl5_req],
                                                   wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_cont_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_cont_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_cont_template', sute_cont_ute_space,
                               'in_file')
        sute_cont_ute_space.inputs.apply_xfm = True
        sute_cont_ute_space.inputs.bgvalue = 0
        sute_cont_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_fix_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_fix_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_fix_ute_space, 'out_file',
                         sute_fix_ute_background, 'in_file')
        sute_fix_ute_background.inputs.op_string = "-add %s "
        sute_fix_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_fix_ute_background,
                         'operand_files')

        sute_cont_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_cont_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_cont_ute_space, 'out_file',
                         sute_cont_ute_background, 'in_file')
        sute_cont_ute_background.inputs.op_string = "-add %s "
        sute_cont_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_cont_ute_background,
                         'operand_files')

        smooth_sute_fix = pipeline.create_node(Smooth(),
                                               name='smooth_sute_fix',
                                               requirements=[fsl5_req],
                                               wall_time=5)
        smooth_sute_fix.inputs.sigma = 2.
        pipeline.connect(sute_fix_ute_background, 'out_file', smooth_sute_fix,
                         'in_file')

        smooth_sute_cont = pipeline.create_node(Smooth(),
                                                name='smooth_sute_cont',
                                                requirements=[fsl5_req],
                                                wall_time=5)
        smooth_sute_cont.inputs.sigma = 2.
        pipeline.connect(sute_cont_ute_background, 'out_file',
                         smooth_sute_cont, 'in_file')

        pipeline.connect_output('sute_fix_ute', smooth_sute_fix,
                                'smoothed_file')
        pipeline.connect_output('sute_cont_ute', smooth_sute_cont,
                                'smoothed_file')
        pipeline.assert_connected()

        return pipeline
Example #16
class MRIStudy(Study, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('primary', dicom_format),
        DatasetSpec('coreg_ref_brain',
                    nifti_gz_format,
                    desc=("A reference scan to coregister the primary "
                          "scan to. Should be brain extracted"),
                    optional=True),
        DatasetSpec('coreg_matrix', text_matrix_format,
                    'linear_coregistration_pipeline'),
        DatasetSpec('preproc', nifti_gz_format, 'preproc_pipeline'),
        DatasetSpec('brain',
                    nifti_gz_format,
                    'brain_extraction_pipeline',
                    desc="The brain masked image"),
        DatasetSpec('brain_mask',
                    nifti_gz_format,
                    'brain_extraction_pipeline',
                    desc="Mask of the brain"),
        DatasetSpec('coreg_brain',
                    nifti_gz_format,
                    'linear_coregistration_pipeline',
                    desc=""),
        DatasetSpec('coreg_to_atlas', nifti_gz_format,
                    'coregister_to_atlas_pipeline'),
        DatasetSpec('coreg_to_atlas_coeff', nifti_gz_format,
                    'coregister_to_atlas_pipeline'),
        DatasetSpec('coreg_to_atlas_mat', text_matrix_format,
                    'coregister_to_atlas_pipeline'),
        DatasetSpec('coreg_to_atlas_warp', nifti_gz_format,
                    'coregister_to_atlas_pipeline'),
        DatasetSpec('coreg_to_atlas_report', gif_format,
                    'coregister_to_atlas_pipeline'),
        DatasetSpec('wm_seg', nifti_gz_format, 'segmentation_pipeline'),
        DatasetSpec('dcm_info', text_format,
                    'header_info_extraction_pipeline'),
        DatasetSpec('motion_mats', motion_mats_format, 'motion_mat_pipeline'),
        DatasetSpec('qformed', nifti_gz_format, 'qform_transform_pipeline'),
        DatasetSpec('qform_mat', text_matrix_format,
                    'qform_transform_pipeline'),
        FieldSpec('tr', float, 'header_info_extraction_pipeline'),
        FieldSpec('start_time', str, 'header_info_extraction_pipeline'),
        FieldSpec('real_duration', str, 'header_info_extraction_pipeline'),
        FieldSpec('tot_duration', str, 'header_info_extraction_pipeline'),
        FieldSpec('ped', str, 'header_info_extraction_pipeline'),
        FieldSpec('pe_angle', str, 'header_info_extraction_pipeline')
    ]

    add_parameter_specs = [
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.5),
        ParameterSpec('bet_reduce_bias', False),
        ParameterSpec('bet_g_threshold', 0.0),
        ParameterSpec('MNI_template',
                      os.path.join(atlas_path, 'MNI152_T1_2mm.nii.gz')),
        ParameterSpec('MNI_template_brain',
                      os.path.join(atlas_path, 'MNI152_T1_2mm_brain.nii.gz')),
        ParameterSpec(
            'MNI_template_mask',
            os.path.join(atlas_path, 'MNI152_T1_2mm_brain_mask.nii.gz')),
        ParameterSpec('optibet_gen_report', False),
        ParameterSpec('fnirt_atlas', 'MNI152'),
        ParameterSpec('fnirt_resolution', '2mm'),
        ParameterSpec('fnirt_intensity_model', 'global_non_linear_with_bias'),
        ParameterSpec('fnirt_subsampling', [4, 4, 2, 2, 1, 1]),
        ParameterSpec('preproc_new_dims', ('RL', 'AP', 'IS')),
        ParameterSpec('preproc_resolution', None, dtype=list),
        ParameterSpec(
            'flirt_degrees_of_freedom',
            6,
            desc=("Number of degrees of freedom used in the registration. "
                  "Default is 6 -> affine transformation.")),
        ParameterSpec(
            'flirt_cost_func',
            'normmi',
            desc=("Cost function used for the registration. Can be one of "
                  "'mutualinfo', 'corratio', 'normcorr', 'normmi', 'leastsq',"
                  " 'labeldiff', 'bbr'")),
        ParameterSpec(
            'flirt_qsform',
            False,
            desc=("Whether to use the QS form supplied in the input image "
                  "header (the image coordinates of the FOV supplied by the "
                  "scanner"))
    ]

    add_switch_specs = [
        SwitchSpec('linear_reg_method',
                   'flirt',
                   choices=('flirt', 'spm', 'ants')),
        SwitchSpec('atlas_coreg_tool', 'ants', choices=('fnirt', 'ants')),
        SwitchSpec('bet_method', 'fsl_bet', choices=('fsl_bet', 'optibet'))
    ]
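
    # Hedged usage note (added for illustration, not in the scraped example):
    # the parameters and switches declared above can be overridden when the
    # study is constructed. The exact constructor keywords are assumptions
    # that vary between arcana versions, e.g.:
    #
    #     study = MRIStudy(
    #         'example', repository, runner, inputs=[...],
    #         parameters=[Parameter('bet_f_threshold', 0.3)],
    #         switches=[Switch('linear_reg_method', 'ants')])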

    @property
    def coreg_brain_spec(self):
        """
        The name of the dataset after registration has been applied.
        If registration is not required, i.e. a 'coreg_ref_brain' input
        is not supplied, then it is simply the 'brain' dataset.
        """
        if 'coreg_ref_brain' in self.input_names:
            name = 'coreg_brain'
        else:
            name = 'brain'
        return DatasetSpec(name, nifti_gz_format)

    def linear_coregistration_pipeline(self, **kwargs):
        if self.branch('linear_reg_method', 'flirt'):
            pipeline = self._flirt_factory('linear_coreg', 'brain',
                                           'coreg_ref_brain', 'coreg_brain',
                                           'coreg_matrix', **kwargs)
        elif self.branch('linear_reg_method', 'ants'):
            pipeline = self._ants_linear_coreg_pipeline(
                'linear_coreg', 'brain', 'coreg_ref_brain', 'coreg_brain',
                'coreg_matrix', **kwargs)
        elif self.branch('linear_reg_method', 'spm'):
            raise NotImplementedError
        else:
            self.unhandled_branch('linear_reg_method')
        return pipeline

    def qform_transform_pipeline(self, **kwargs):
        return self._qform_transform_factory('qform_transform', 'brain',
                                             'coreg_ref_brain', 'qformed',
                                             'qform_mat', **kwargs)

    def _flirt_factory(self, name, to_reg, ref, reg, matrix, **kwargs):
        """
        Registers an MR scan to a reference MR scan using FSL's FLIRT command

        Parameters
        ----------
        name : str
            Name for the generated pipeline
        to_reg : str
            Name of the DatasetSpec to register
        ref : str
            Name of the DatasetSpec to use as a reference
        reg : str
            Name of the DatasetSpec to output as registered image
        matrix : str
            Name of the DatasetSpec to output as registration matrix
        """

        pipeline = self.create_pipeline(
            name=name,
            inputs=[
                DatasetSpec(to_reg, nifti_gz_format),
                DatasetSpec(ref, nifti_gz_format)
            ],
            outputs=[
                DatasetSpec(reg, nifti_gz_format),
                DatasetSpec(matrix, text_matrix_format)
            ],
            desc="Registers a MR scan against a reference image using FLIRT",
            version=1,
            citations=[fsl_cite],
            **kwargs)
        flirt = pipeline.create_node(interface=FLIRT(),
                                     name='flirt',
                                     requirements=[fsl5_req],
                                     wall_time=5)

        # Set registration parameters
        flirt.inputs.dof = self.parameter('flirt_degrees_of_freedom')
        flirt.inputs.cost = self.parameter('flirt_cost_func')
        flirt.inputs.cost_func = self.parameter('flirt_cost_func')
        flirt.inputs.output_type = 'NIFTI_GZ'
        # Connect inputs
        pipeline.connect_input(to_reg, flirt, 'in_file')
        pipeline.connect_input(ref, flirt, 'reference')
        # Connect outputs
        pipeline.connect_output(reg, flirt, 'out_file')
        pipeline.connect_output(matrix, flirt, 'out_matrix_file')
        return pipeline
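
    # For orientation, a minimal standalone nipype sketch of the FLIRT call
    # this factory wires up (assumes FSL is installed; file names are
    # hypothetical placeholders):
    #
    #     from nipype.interfaces.fsl import FLIRT
    #     flirt = FLIRT(in_file='subj_brain.nii.gz',
    #                   reference='ref_brain.nii.gz',
    #                   dof=6, cost='normmi', output_type='NIFTI_GZ')
    #     res = flirt.run()  # yields out_file and out_matrix_file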

    def _qform_transform_factory(self, name, to_reg, ref, qformed, qformed_mat,
                                 **kwargs):
        pipeline = self.create_pipeline(
            name=name,
            inputs=[
                DatasetSpec(to_reg, nifti_gz_format),
                DatasetSpec(ref, nifti_gz_format)
            ],
            outputs=[
                DatasetSpec(qformed, nifti_gz_format),
                DatasetSpec(qformed_mat, text_matrix_format)
            ],
            desc="Registers a MR scan against a reference image",
            version=1,
            citations=[fsl_cite],
            **kwargs)
        flirt = pipeline.create_node(interface=FLIRT(),
                                     name='flirt',
                                     requirements=[fsl5_req],
                                     wall_time=5)
        flirt.inputs.uses_qform = True
        flirt.inputs.apply_xfm = True
        # Connect inputs
        pipeline.connect_input(to_reg, flirt, 'in_file')
        pipeline.connect_input(ref, flirt, 'reference')
        # Connect outputs
        pipeline.connect_output(qformed, flirt, 'out_file')
        pipeline.connect_output(qformed_mat, flirt, 'out_matrix_file')
        return pipeline
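
    # Here FLIRT is not optimising anything: 'uses_qform' plus 'apply_xfm'
    # initialise and apply the transform implied by the images' qform
    # headers, giving a cheap header-based alignment ('qformed') and the
    # corresponding matrix ('qform_mat').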

    def _spm_coreg_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Coregisters T2 image to T1 image using SPM's
        "Register" method.

        NB: Default values come from the W2MHS toolbox
        """
        pipeline = self.create_pipeline(
            name='registration',
            inputs=[
                DatasetSpec('t1', nifti_format),
                DatasetSpec('t2', nifti_format)
            ],
            outputs=[DatasetSpec('t2_coreg_t1', nifti_format)],
            desc="Coregister T2-weighted images to T1",
            version=1,
            citations=[spm_cite],
            **kwargs)
        coreg = pipeline.create_node(Coregister(),
                                     name='coreg',
                                     requirements=[spm12_req],
                                     wall_time=30)
        coreg.inputs.jobtype = 'estwrite'
        coreg.inputs.cost_function = 'nmi'
        coreg.inputs.separation = [4, 2]
        coreg.inputs.tolerance = [
            0.02, 0.02, 0.02, 0.001, 0.001, 0.001, 0.01, 0.01, 0.01, 0.001,
            0.001, 0.001
        ]
        coreg.inputs.fwhm = [7, 7]
        coreg.inputs.write_interp = 4
        coreg.inputs.write_wrap = [0, 0, 0]
        coreg.inputs.write_mask = False
        coreg.inputs.out_prefix = 'r'
        # Connect inputs
        pipeline.connect_input('t1', coreg, 'target')
        pipeline.connect_input('t2', coreg, 'source')
        # Connect outputs
        pipeline.connect_output('t2_coreg_t1', coreg, 'coregistered_source')
        return pipeline
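
    # Note: unlike the FLIRT/ANTs factories above, this SPM variant uses
    # hard-wired 't1'/'t2' spec names, and jobtype='estwrite' both estimates
    # and reslices, so the resliced image is written directly as
    # 't2_coreg_t1'.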

    def _ants_linear_coreg_pipeline(self, name, to_reg, ref, reg, matrix,
                                    **kwargs):

        pipeline = self.create_pipeline(
            name=name,
            inputs=[
                DatasetSpec(to_reg, nifti_gz_format),
                DatasetSpec(ref, nifti_gz_format)
            ],
            outputs=[
                DatasetSpec(reg, nifti_gz_format),
                DatasetSpec(matrix, text_matrix_format)
            ],
            desc="Registers a MR scan against a reference image using ANTs",
            version=1,
            citations=[],
            **kwargs)

        ants_linear = pipeline.create_node(AntsRegSyn(num_dimensions=3,
                                                      transformation='r',
                                                      out_prefix='reg2hires'),
                                           name='ANTs_linear_Reg',
                                           wall_time=10,
                                           requirements=[ants2_req])
        pipeline.connect_input(ref, ants_linear, 'ref_file')
        pipeline.connect_input(to_reg, ants_linear, 'input_file')

        pipeline.connect_output(reg, ants_linear, 'reg_file')
        pipeline.connect_output(matrix, ants_linear, 'regmat')

        return pipeline

    def brain_extraction_pipeline(self, in_file='preproc', **kwargs):
        if self.branch('bet_method', 'fsl_bet'):
            pipeline = self._fsl_bet_brain_extraction_pipeline(
                in_file, **kwargs)
        elif self.branch('bet_method', 'optibet'):
            pipeline = self._optiBET_brain_extraction_pipeline(
                in_file, **kwargs)
        else:
            self.unhandled_branch('bet_method')
        return pipeline
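
    # The 'branch' calls above dispatch on the 'bet_method' switch declared
    # in add_switch_specs; a study constructed with that switch set to
    # 'optibet' (the constructor keyword is an assumption, e.g.
    # switches=[Switch('bet_method', 'optibet')]) derives 'brain' and
    # 'brain_mask' via _optiBET_brain_extraction_pipeline instead of FSL BET.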

    def _fsl_bet_brain_extraction_pipeline(self, in_file, **kwargs):
        """
        Generates a whole brain mask using FSL's BET command.
        """
        pipeline = self.create_pipeline(
            name='brain_extraction',
            inputs=[DatasetSpec(in_file, nifti_gz_format)],
            outputs=[
                DatasetSpec('brain', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            desc="Generate brain mask from mr_scan",
            version=1,
            citations=[fsl_cite, bet_cite, bet2_cite],
            **kwargs)
        # Create mask node
        bet = pipeline.create_node(interface=fsl.BET(),
                                   name="bet",
                                   requirements=[fsl509_req])
        bet.inputs.mask = True
        bet.inputs.output_type = 'NIFTI_GZ'
        if self.parameter('bet_robust'):
            bet.inputs.robust = True
        if self.parameter('bet_reduce_bias'):
            bet.inputs.reduce_bias = True
        bet.inputs.frac = self.parameter('bet_f_threshold')
        bet.inputs.vertical_gradient = self.parameter('bet_g_threshold')
        # Connect inputs/outputs
        pipeline.connect_input(in_file, bet, 'in_file')
        pipeline.connect_output('brain', bet, 'out_file')
        pipeline.connect_output('brain_mask', bet, 'mask_file')
        return pipeline
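
    # Minimal standalone nipype sketch of the equivalent BET call (assumes
    # FSL is installed; the input file name is a hypothetical placeholder):
    #
    #     from nipype.interfaces import fsl
    #     bet = fsl.BET(in_file='subj_preproc.nii.gz', mask=True, frac=0.5,
    #                   robust=True, output_type='NIFTI_GZ')
    #     res = bet.run()  # out_file -> brain, mask_file -> brain_mask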

    def _optiBET_brain_extraction_pipeline(self, in_file, **kwargs):
        """
        Generates a whole brain mask using a modified optiBET approach.
        """

        outputs = [
            DatasetSpec('brain', nifti_gz_format),
            DatasetSpec('brain_mask', nifti_gz_format)
        ]
        if self.parameter('optibet_gen_report'):
            outputs.append(DatasetSpec('optiBET_report', gif_format))
        pipeline = self.create_pipeline(
            name='brain_extraction',
            inputs=[DatasetSpec(in_file, nifti_gz_format)],
            outputs=outputs,
            desc=("Modified implementation of optiBET.sh"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mni_reg = pipeline.create_node(AntsRegSyn(num_dimensions=3,
                                                  transformation='s',
                                                  out_prefix='T12MNI',
                                                  num_threads=4),
                                       name='T1_reg',
                                       wall_time=25,
                                       requirements=[ants2_req])
        mni_reg.inputs.ref_file = self.parameter('MNI_template')
        pipeline.connect_input(in_file, mni_reg, 'input_file')

        merge_trans = pipeline.create_node(Merge(2),
                                           name='merge_transforms',
                                           wall_time=1)
        pipeline.connect(mni_reg, 'inv_warp', merge_trans, 'in1')
        pipeline.connect(mni_reg, 'regmat', merge_trans, 'in2')

        trans_flags = pipeline.create_node(Merge(2),
                                           name='trans_flags',
                                           wall_time=1)
        trans_flags.inputs.in1 = False
        trans_flags.inputs.in2 = True

        apply_trans = pipeline.create_node(ApplyTransforms(),
                                           name='ApplyTransform',
                                           wall_time=7,
                                           memory=24000,
                                           requirements=[ants2_req])
        apply_trans.inputs.input_image = self.parameter('MNI_template_mask')
        apply_trans.inputs.interpolation = 'NearestNeighbor'
        apply_trans.inputs.input_image_type = 3
        pipeline.connect(merge_trans, 'out', apply_trans, 'transforms')
        pipeline.connect(trans_flags, 'out', apply_trans,
                         'invert_transform_flags')
        pipeline.connect_input(in_file, apply_trans, 'reference_image')

        maths1 = pipeline.create_node(
            fsl.ImageMaths(suffix='_optiBET_brain_mask', op_string='-bin'),
            name='binarize',
            wall_time=5,
            requirements=[fsl5_req])
        pipeline.connect(apply_trans, 'output_image', maths1, 'in_file')
        maths2 = pipeline.create_node(
            fsl.ImageMaths(suffix='_optiBET_brain', op_string='-mas'),
            name='mask',
            wall_time=5,
            requirements=[fsl5_req])
        pipeline.connect_input(in_file, maths2, 'in_file')
        pipeline.connect(maths1, 'out_file', maths2, 'in_file2')
        if self.parameter('optibet_gen_report'):
            slices = pipeline.create_node(FSLSlices(),
                                          name='slices',
                                          wall_time=5,
                                          requirements=[fsl5_req])
            slices.inputs.outname = 'optiBET_report'
            pipeline.connect_input(in_file, slices, 'im1')
            pipeline.connect(maths2, 'out_file', slices, 'im2')
            pipeline.connect_output('optiBET_report', slices, 'report')

        pipeline.connect_output('brain_mask', maths1, 'out_file')
        pipeline.connect_output('brain', maths2, 'out_file')

        return pipeline
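
    # In outline, the optiBET-style pipeline above: (1) nonlinearly registers
    # the input to the MNI template; (2) applies the inverse transforms to
    # bring the MNI brain mask back to native space; (3) binarises the
    # back-projected mask ('-bin'); and (4) masks the input with it ('-mas').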

    def coregister_to_atlas_pipeline(self, **kwargs):
        if self.branch('atlas_coreg_tool', 'fnirt'):
            pipeline = self._fsl_fnirt_to_atlas_pipeline(**kwargs)
        elif self.branch('atlas_coreg_tool', 'ants'):
            pipeline = self._ants_to_atlas_pipeline(**kwargs)
        else:
            self.unhandled_branch('atlas_coreg_tool')
        return pipeline

    def _fsl_fnirt_to_atlas_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Registers an MR scan to a reference MR scan using FSL's nonlinear
        FNIRT command

        Parameters
        ----------
        atlas : str
            Which atlas to use; can be one of 'mni_nl6'
        """
        pipeline = self.create_pipeline(
            name='coregister_to_atlas',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('brain', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('coreg_to_atlas', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_coeff', nifti_gz_format)
            ],
            desc=("Nonlinearly registers a MR scan to a standard space,"
                  "e.g. MNI-space"),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        # Get the reference atlas from FSL directory
        # NB: the spec list declares 'fnirt_resolution' (not 'resolution')
        resolution = self.parameter('fnirt_resolution')
        ref_atlas = get_atlas_path(self.parameter('fnirt_atlas'), 'image',
                                   resolution=resolution)
        ref_mask = get_atlas_path(self.parameter('fnirt_atlas'),
                                  'mask_dilated', resolution=resolution)
        ref_brain = get_atlas_path(self.parameter('fnirt_atlas'), 'brain',
                                   resolution=resolution)
        # Basic reorientation to standard MNI space
        reorient = pipeline.create_node(Reorient2Std(),
                                        name='reorient',
                                        requirements=[fsl5_req])
        reorient.inputs.output_type = 'NIFTI_GZ'
        reorient_mask = pipeline.create_node(Reorient2Std(),
                                             name='reorient_mask',
                                             requirements=[fsl5_req])
        reorient_mask.inputs.output_type = 'NIFTI_GZ'
        reorient_brain = pipeline.create_node(Reorient2Std(),
                                              name='reorient_brain',
                                              requirements=[fsl5_req])
        reorient_brain.inputs.output_type = 'NIFTI_GZ'
        # Affine transformation to MNI space
        flirt = pipeline.create_node(interface=FLIRT(),
                                     name='flirt',
                                     requirements=[fsl5_req],
                                     wall_time=5)
        flirt.inputs.reference = ref_brain
        flirt.inputs.dof = 12
        flirt.inputs.output_type = 'NIFTI_GZ'
        # Nonlinear transformation to MNI space
        fnirt = pipeline.create_node(interface=FNIRT(),
                                     name='fnirt',
                                     requirements=[fsl5_req],
                                     wall_time=60)
        fnirt.inputs.ref_file = ref_atlas
        fnirt.inputs.refmask_file = ref_mask
        fnirt.inputs.output_type = 'NIFTI_GZ'
        intensity_model = self.parameter('fnirt_intensity_model')
        if intensity_model is None:
            intensity_model = 'none'
        fnirt.inputs.intensity_mapping_model = intensity_model
        fnirt.inputs.subsampling_scheme = self.parameter('fnirt_subsampling')
        fnirt.inputs.fieldcoeff_file = True
        fnirt.inputs.in_fwhm = [8, 6, 5, 4.5, 3, 2]
        fnirt.inputs.ref_fwhm = [8, 6, 5, 4, 2, 0]
        fnirt.inputs.regularization_lambda = [300, 150, 100, 50, 40, 30]
        fnirt.inputs.apply_intensity_mapping = [1, 1, 1, 1, 1, 0]
        fnirt.inputs.max_nonlin_iter = [5, 5, 5, 5, 5, 10]
        # Apply mask if corresponding subsampling scheme is 1
        # (i.e. 1-to-1 resolution) otherwise don't.
        apply_mask = [int(s == 1) for s in self.parameter('fnirt_subsampling')]
        fnirt.inputs.apply_inmask = apply_mask
        fnirt.inputs.apply_refmask = apply_mask
        # Connect nodes
        pipeline.connect(reorient_brain, 'out_file', flirt, 'in_file')
        pipeline.connect(reorient, 'out_file', fnirt, 'in_file')
        pipeline.connect(reorient_mask, 'out_file', fnirt, 'inmask_file')
        pipeline.connect(flirt, 'out_matrix_file', fnirt, 'affine_file')
        # Set registration parameters
        # TODO: Need to work out which parameters to use
        # Connect inputs
        pipeline.connect_input('preproc', reorient, 'in_file')
        pipeline.connect_input('brain_mask', reorient_mask, 'in_file')
        pipeline.connect_input('brain', reorient_brain, 'in_file')
        # Connect outputs
        pipeline.connect_output('coreg_to_atlas', fnirt, 'warped_file')
        pipeline.connect_output('coreg_to_atlas_coeff', fnirt,
                                'fieldcoeff_file')
        return pipeline
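
    # The atlas registration above is the standard two-stage FSL strategy:
    # a 12-DOF FLIRT affine initialises FNIRT, whose nonlinear warp is
    # written out as a field-coefficient file ('coreg_to_atlas_coeff') that
    # can later be applied with applywarp.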

    def _ants_to_atlas_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='coregister_to_atlas',
            inputs=[DatasetSpec('coreg_ref_brain', nifti_gz_format)],
            outputs=[
                DatasetSpec('coreg_to_atlas', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_mat', text_matrix_format),
                DatasetSpec('coreg_to_atlas_warp', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_report', gif_format)
            ],
            desc=("Nonlinearly registers a MR scan to a standard space,"
                  "e.g. MNI-space"),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        ants_reg = pipeline.create_node(AntsRegSyn(num_dimensions=3,
                                                   transformation='s',
                                                   out_prefix='Struct2MNI',
                                                   num_threads=4),
                                        name='Struct2MNI_reg',
                                        wall_time=25,
                                        requirements=[ants2_req])

        ref_brain = self.parameter('MNI_template_brain')
        ants_reg.inputs.ref_file = ref_brain
        pipeline.connect_input('coreg_ref_brain', ants_reg, 'input_file')

        slices = pipeline.create_node(FSLSlices(),
                                      name='slices',
                                      wall_time=1,
                                      requirements=[fsl5_req])
        slices.inputs.outname = 'coreg_to_atlas_report'
        slices.inputs.im1 = self.parameter('MNI_template')
        pipeline.connect(ants_reg, 'reg_file', slices, 'im2')

        pipeline.connect_output('coreg_to_atlas', ants_reg, 'reg_file')
        pipeline.connect_output('coreg_to_atlas_mat', ants_reg, 'regmat')
        pipeline.connect_output('coreg_to_atlas_warp', ants_reg, 'warp_file')
        pipeline.connect_output('coreg_to_atlas_report', slices, 'report')

        return pipeline

    def segmentation_pipeline(self, img_type=2, **kwargs):
        pipeline = self.create_pipeline(
            name='FAST_segmentation',
            inputs=[DatasetSpec('brain', nifti_gz_format)],
            outputs=[DatasetSpec('wm_seg', nifti_gz_format)],
            desc="White matter segmentation of the reference image",
            version=1,
            citations=[fsl_cite],
            **kwargs)

        fast = pipeline.create_node(fsl.FAST(),
                                    name='fast',
                                    requirements=[fsl509_req])
        fast.inputs.img_type = img_type
        fast.inputs.segments = True
        fast.inputs.out_basename = 'Reference_segmentation'
        pipeline.connect_input('brain', fast, 'in_files')
        split = pipeline.create_node(Split(), name='split')
        split.inputs.splits = [1, 1, 1]
        split.inputs.squeeze = True
        pipeline.connect(fast, 'tissue_class_files', split, 'inlist')
        if img_type == 1:
            pipeline.connect_output('wm_seg', split, 'out3')
        elif img_type == 2:
            pipeline.connect_output('wm_seg', split, 'out2')
        else:
            raise ArcanaUsageError(
                "'img_type' parameter can either be 1 or 2 (not {})".format(
                    img_type))

        return pipeline
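
    # FAST returns one image per tissue class in 'tissue_class_files'; the
    # class corresponding to white matter depends on the input contrast,
    # which is why a different output of the Split node is connected for
    # img_type 1 (T1) versus img_type 2 (T2).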

    def preproc_pipeline(self, in_file_name='primary', **kwargs):
        """
        Performs basic preprocessing, such as swapping dimensions into
        standard orientation and resampling (if required)

        Parameters
        ----------
        new_dims : tuple(str)[3]
            A 3-tuple with the new orientation of the image (see FSL
            swapdim). Taken from the 'preproc_new_dims' study parameter.
        resolution : list(float)[3] | None
            New resolution of the image. If None, no resampling is
            performed. Taken from the 'preproc_resolution' study parameter.
        """
        pipeline = self.create_pipeline(
            name='preproc_pipeline',
            inputs=[DatasetSpec(in_file_name, nifti_gz_format)],
            outputs=[DatasetSpec('preproc', nifti_gz_format)],
            desc=("Dimensions swapping to ensure that all the images "
                  "have the same orientations."),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        swap = pipeline.create_node(fsl.utils.Reorient2Std(),
                                    name='fslreorient2std',
                                    requirements=[fsl509_req])
        # swap.inputs.new_dims = self.parameter('preproc_new_dims')
        pipeline.connect_input(in_file_name, swap, 'in_file')
        if self.parameter('preproc_resolution') is not None:
            resample = pipeline.create_node(MRResize(),
                                            name="resample",
                                            requirements=[mrtrix3_req])
            resample.inputs.voxel = self.parameter('preproc_resolution')
            pipeline.connect(swap, 'out_file', resample, 'in_file')
            pipeline.connect_output('preproc', resample, 'out_file')
        else:
            pipeline.connect_output('preproc', swap, 'out_file')

        return pipeline
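
    # Minimal standalone nipype sketch of the reorientation step (assumes
    # FSL is installed; the file name is a hypothetical placeholder):
    #
    #     from nipype.interfaces.fsl.utils import Reorient2Std
    #     res = Reorient2Std(in_file='raw.nii.gz',
    #                        output_type='NIFTI_GZ').run()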

    def header_info_extraction_pipeline(self, **kwargs):
        if self.input('primary').format != dicom_format:
            raise ArcanaUsageError(
                "Can only extract header info if 'primary' dataset "
                "is provided in DICOM format ({})".format(
                    self.input('primary').format))
        return self.header_info_extraction_pipeline_factory(
            'header_info_extraction', 'primary', **kwargs)

    def header_info_extraction_pipeline_factory(self,
                                                name,
                                                dcm_in_name,
                                                multivol=False,
                                                output_prefix='',
                                                **kwargs):

        tr = output_prefix + 'tr'
        start_time = output_prefix + 'start_time'
        tot_duration = output_prefix + 'tot_duration'
        real_duration = output_prefix + 'real_duration'
        ped = output_prefix + 'ped'
        pe_angle = output_prefix + 'pe_angle'
        dcm_info = output_prefix + 'dcm_info'
        outputs = [
            FieldSpec(tr, dtype=float),
            FieldSpec(start_time, dtype=str),
            FieldSpec(tot_duration, dtype=str),
            FieldSpec(real_duration, dtype=str),
            FieldSpec(ped, dtype=str),
            FieldSpec(pe_angle, dtype=str),
            DatasetSpec(dcm_info, text_format)
        ]

        pipeline = self.create_pipeline(
            name=name,
            inputs=[DatasetSpec(dcm_in_name, dicom_format)],
            outputs=outputs,
            desc=("Pipeline to extract the most important scan "
                  "information from the image header"),
            version=1,
            citations=[],
            **kwargs)
        hd_extraction = pipeline.create_node(DicomHeaderInfoExtraction(),
                                             name='hd_info_extraction')
        hd_extraction.inputs.multivol = multivol
        pipeline.connect_input(dcm_in_name, hd_extraction, 'dicom_folder')
        pipeline.connect_output(tr, hd_extraction, 'tr')
        pipeline.connect_output(start_time, hd_extraction, 'start_time')
        pipeline.connect_output(tot_duration, hd_extraction, 'tot_duration')
        pipeline.connect_output(real_duration, hd_extraction, 'real_duration')
        pipeline.connect_output(ped, hd_extraction, 'ped')
        pipeline.connect_output(pe_angle, hd_extraction, 'pe_angle')
        pipeline.connect_output(dcm_info, hd_extraction, 'dcm_info')
        return pipeline
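
    # The factory can also be reused for a second DICOM series; e.g. a
    # hypothetical call such as
    #
    #     self.header_info_extraction_pipeline_factory(
    #         'ute_header_info_extraction', 'ute', output_prefix='ute_')
    #
    # would emit 'ute_tr', 'ute_start_time', etc., provided matching
    # Field/Dataset specs are declared on the study.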

    def motion_mat_pipeline(self, **kwargs):
        if not self.spec('coreg_matrix').derivable:
            logger.info("Cannot derive 'coreg_matrix' for {} required for "
                        "motion matrix calculation, assuming that it "
                        "is the reference study".format(self))
            inputs = [DatasetSpec('primary', dicom_format)]
            ref = True
        else:
            inputs = [
                DatasetSpec('coreg_matrix', text_matrix_format),
                DatasetSpec('qform_mat', text_matrix_format)
            ]
            if 'align_mats' in self.data_spec_names():
                inputs.append(DatasetSpec('align_mats', directory_format))
            ref = False
        pipeline = self.create_pipeline(
            name='motion_mat_calculation',
            inputs=inputs,
            outputs=[DatasetSpec('motion_mats', motion_mats_format)],
            desc=("Motion matrices calculation"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mm = pipeline.create_node(MotionMatCalculation(), name='motion_mats')
        if ref:
            mm.inputs.reference = True
            pipeline.connect_input('primary', mm, 'dummy_input')
        else:
            pipeline.connect_input('coreg_matrix', mm, 'reg_mat')
            pipeline.connect_input('qform_mat', mm, 'qform_mat')
            if 'align_mats' in self.data_spec_names():
                pipeline.connect_input('align_mats', mm, 'align_mats')
        pipeline.connect_output('motion_mats', mm, 'motion_mats')
        return pipeline
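
# ---------------------------------------------------------------------------
# Hedged end-to-end sketch (added for illustration, not part of the scraped
# example). Repository/runner construction and the input-matching class are
# assumptions that differ between arcana versions:
#
#     study = MRIStudy(
#         name='example_mri',
#         repository=my_repository,   # hypothetical local/XNAT repository
#         runner=my_runner,           # hypothetical processing runner
#         inputs=[DatasetMatch('primary', dicom_format, 't1_mprage')])
#     brain = study.data('brain')  # runs brain_extraction_pipeline on demand
# ---------------------------------------------------------------------------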