Example #1
0
def test_JSONFileSink_outputs():
    """Check that JSONFileSink's output spec exposes the expected traits."""
    expected_traits = {"out_file": {}}
    spec = JSONFileSink.output_spec()

    for trait_name, expectations in expected_traits.items():
        for attr_name, expected in expectations.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr_name), expected
def test_JSONFileSink_outputs():
    """Verify trait metadata on JSONFileSink's output specification."""
    wanted = dict(out_file={})
    outputs = JSONFileSink.output_spec()

    for name in wanted:
        for meta_name, wanted_value in wanted[name].items():
            yield assert_equal, getattr(outputs.traits()[name], meta_name), wanted_value
Example #3
0
def test_JSONFileSink_inputs():
    """Check the traits of JSONFileSink's input spec carry the expected metadata."""
    expected = {
        "_outputs": {"usedefault": True},
        "ignore_exception": {"nohash": True, "usedefault": True},
        "in_dict": {"usedefault": True},
        "out_file": {},
    }
    spec = JSONFileSink.input_spec()

    for trait_name, metadata in expected.items():
        for attr, value in metadata.items():
            yield assert_equal, getattr(spec.traits()[trait_name], attr), value
def test_JSONFileSink_inputs():
    """Verify metadata values on each input trait of JSONFileSink."""
    wanted = dict(
        _outputs=dict(usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        in_dict=dict(usedefault=True),
        out_file=dict(),
    )
    inputs = JSONFileSink.input_spec()

    for trait_name in wanted:
        for meta_name, wanted_value in wanted[trait_name].items():
            yield assert_equal, getattr(inputs.traits()[trait_name], meta_name), wanted_value
Example #5
0
    def __init__(self, parent, dir_dic, bids):
        """Build the diffusion pre-processing workflow.

        Creates BET, N4 bias-field-correction, b0 extraction, and eddy-current
        correction nodes, registers each in ``self.interfaces`` (append order
        matters — it drives the UI button mapping via ``btn_string``), sets up
        Data/JSON sinks, and wires everything into a nipype ``Workflow`` named
        ``pre_processing``.

        Args:
            parent: parent widget/controller, forwarded to the base class.
            dir_dic: dict of directory paths; ``dir_dic['data_dir']`` is used
                as the base directory of both sinks.
            bids: BIDS dataset handle, forwarded to the base class.
        """
        super().__init__(parent, dir_dic, bids)

        # Create interfaces ============================================================================================
        # BET
        T1w_BET = Node(BET(), name="T1w_BET")
        T1w_BET.btn_string = 'T1w Brain Extraction'
        self.interfaces.append(T1w_BET)

        T1w_gad_BET = Node(BET(), name="T1w_gad_BET")
        T1w_gad_BET.btn_string = 'T1w Gadolinium Enhanced Brain Extraction'
        self.interfaces.append(T1w_gad_BET)

        T2w_dbs_BET = Node(BET(), name="T2w_dbs_BET")
        T2w_dbs_BET.btn_string = 'T2w DBS Acquisition Brain Extraction'
        self.interfaces.append(T2w_dbs_BET)

        dwi_BET = Node(BET(), name="dwi_BET")
        dwi_BET.btn_string = 'dwi Brain Extraction'
        self.interfaces.append(dwi_BET)

        # BFC
        T1w_BFC = Node(N4BiasFieldCorrection(), name="T1w_BFC")
        T1w_BFC.btn_string = 'T1w Bias Field Correction'
        self.interfaces.append(T1w_BFC)

        # Split
        dwi_ROI_b0 = Node(ExtractROI(), name="dwi_ROI_b0")
        dwi_ROI_b0.btn_string = 'dwi Extract b0'
        self.interfaces.append(dwi_ROI_b0)

        # Eddy current correction
        dwi_Eddy = Node(Eddy(), name="dwi_Eddy")
        dwi_Eddy.btn_string = 'dwi Eddy Current Correction'
        self.interfaces.append(dwi_Eddy)

        # Distortion correction
        # as this section is script/comment heavy it was put into a function
        # NOTE(review): presumably defines the b0_T1w_Reg and dwi_T1w_Tran
        # nodes referenced below — confirm in distortion_correction_workflow().
        self.distortion_correction_workflow()

        # Data output (i.e. sink) ======================================================================================
        self.sink = Node(DataSink(), name="sink")
        self.sink.btn_string = 'data sink'
        self.sink.inputs.base_directory = self.dir_dic['data_dir']

        self.jsink = Node(JSONFileSink(), name="jsink")
        self.jsink.btn_string = 'json sink'
        self.jsink.inputs.base_directory = self.dir_dic['data_dir']

        # Initialize workflow ==========================================================================================
        self.wf = Workflow(name='pre_processing')

        # T1w BET to ants N4BiasFieldCorrection
        self.wf.connect([(self.return_interface("T1w_BET"),
                          self.return_interface("T1w_BFC"),
                          [("out_file", "input_image")])])
        self.wf.connect([(self.return_interface("T1w_BET"),
                          self.return_interface("T1w_BFC"), [("mask_file",
                                                              "mask_image")])])

        # Eddy
        self.wf.connect([(self.return_interface("dwi_BET"),
                          self.return_interface("dwi_Eddy"), [("out_file",
                                                               "in_file")])])

        self.wf.connect([(self.return_interface("dwi_BET"),
                          self.return_interface("dwi_Eddy"), [("mask_file",
                                                               "in_mask")])])

        # ROI b0
        self.wf.connect([(self.return_interface("dwi_Eddy"),
                          self.return_interface("dwi_ROI_b0"),
                          [("out_corrected", "in_file")])])

        # Distortion Correction:
        # b0_T1_Reg:
        #   -i: moving image
        #   -r: T1
        #   -x: T1 mask
        self.wf.connect([(self.return_interface("dwi_ROI_b0"),
                          self.return_interface("b0_T1w_Reg"),
                          [("roi_file", "moving_image")])])

        self.wf.connect([(self.return_interface("T1w_BFC"),
                          self.return_interface("b0_T1w_Reg"),
                          [("output_image", "fixed_image")])])

        # test remove as doesn't seem useful (see self.distortion_correction_workflow()) and causes a crash when added
        # self.wf.connect([(self.return_interface("T1w_BET"), self.return_interface("b0_T1w_Reg"),
        #                   [("mask_file", "fixed_image_mask")])])

        # dwi_T1_Tran:
        #   -i: Eddy corrected image
        #   -r: Eddy corrected b0
        #   -t: transforms
        self.wf.connect([(self.return_interface("dwi_Eddy"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("out_corrected", "input_image")])])

        self.wf.connect([(self.return_interface("dwi_ROI_b0"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("roi_file", "reference_image")])])

        self.wf.connect([(self.return_interface("b0_T1w_Reg"),
                          self.return_interface("dwi_T1w_Tran"),
                          [("composite_transform", "transforms")])])

        # BaseInterface generates a dict mapping button strings to the workflow nodes
        # self.map_workflow()
        # Render the workflow graph and keep the path of the detailed variant
        # for display in the UI.
        graph_file = self.wf.write_graph("pre_processing", graph2use='flat')
        self.graph_file = graph_file.replace("pre_processing.png",
                                             "pre_processing_detailed.png")

        self.init_settings()
        self.init_ui()
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.

        Creates three Function nodes — ``inputnode`` (data preparation via
        ``utils.prepare_data``), ``surfstat`` (MATLAB SurfStat wrapper via
        ``utils.runmatlab``), and ``Jsondict`` (GLM-metadata dict via
        ``utils.json_dict_create``) — plus a ``JSONFileSink`` node, then wires
        them together: ``self.input_node`` fans out into all three Function
        nodes, ``data_prep`` feeds ``surfstat`` its directories/paths, and the
        JSON sink receives its target path from ``data_prep`` and its payload
        dict from ``jsondict``.
        """

        import clinica.pipelines.statistics_surface.statistics_surface_utils as utils
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from nipype.interfaces.io import JSONFileSink

        # Node to fetch the input variables
        data_prep = npe.Node(name='inputnode',
                             interface=nutil.Function(
                                 input_names=[
                                     'input_directory', 'subjects_visits_tsv',
                                     'group_label', 'glm_type'
                                 ],
                                 output_names=[
                                     'path_to_matscript', 'surfstat_input_dir',
                                     'output_directory', 'freesurfer_home',
                                     'out_json'
                                 ],
                                 function=utils.prepare_data))
        # These two inputs are fixed from the pipeline instance; the rest
        # arrive through workflow connections below.
        data_prep.inputs.input_directory = self.caps_directory
        data_prep.inputs.subjects_visits_tsv = self.tsv_file

        # Node to wrap the SurfStat matlab script
        surfstat = npe.Node(
            name='surfstat',
            interface=nutil.Function(input_names=[
                'input_directory', 'output_directory', 'subjects_visits_tsv',
                'design_matrix', 'contrast', 'str_format', 'glm_type',
                'group_label', 'freesurfer_home', 'surface_file',
                'path_to_matscript', 'full_width_at_half_maximum',
                'threshold_uncorrected_pvalue', 'threshold_corrected_pvalue',
                'cluster_threshold', 'feature_label'
            ],
                                     output_names=['out_images'],
                                     function=utils.runmatlab))
        surfstat.inputs.subjects_visits_tsv = self.tsv_file

        # Node to create the dictionary for JSONFileSink
        jsondict = npe.Node(
            name='Jsondict',
            interface=nutil.Function(input_names=[
                'glm_type', 'design_matrix', 'str_format', 'contrast',
                'group_label', 'full_width_at_half_maximum',
                'threshold_uncorrected_pvalue', 'threshold_corrected_pvalue',
                'cluster_threshold'
            ],
                                     output_names=['json_dict'],
                                     function=utils.json_dict_create))

        # Node to write the GLM information into a JSON file
        jsonsink = npe.Node(JSONFileSink(input_names=['out_file']),
                            name='jsonsinker')

        # Connection
        # ==========
        self.connect([
            (self.input_node, data_prep, [('group_label', 'group_label')
                                          ]),  # noqa
            (self.input_node, data_prep, [('glm_type', 'glm_type')]),  # noqa
            (self.input_node, surfstat, [('design_matrix', 'design_matrix')
                                         ]),  # noqa
            (self.input_node, surfstat, [('contrast', 'contrast')]),  # noqa
            (self.input_node, surfstat, [('str_format', 'str_format')
                                         ]),  # noqa
            (self.input_node, surfstat, [('glm_type', 'glm_type')]),  # noqa
            (self.input_node, surfstat, [('group_label', 'group_label')
                                         ]),  # noqa
            (self.input_node, surfstat, [
                ('full_width_at_half_maximum', 'full_width_at_half_maximum')
            ]),  # noqa
            (self.input_node, surfstat, [('threshold_uncorrected_pvalue',
                                          'threshold_uncorrected_pvalue')
                                         ]),  # noqa
            (self.input_node, surfstat, [
                ('threshold_corrected_pvalue', 'threshold_corrected_pvalue')
            ]),  # noqa
            (self.input_node, surfstat, [('cluster_threshold',
                                          'cluster_threshold')]),  # noqa
            (self.input_node, surfstat, [('surface_file', 'surface_file')
                                         ]),  # noqa
            (self.input_node, surfstat, [('feature_label', 'feature_label')]),
            # data_prep outputs feed surfstat's directory/path inputs
            (data_prep, surfstat, [('surfstat_input_dir', 'input_directory')
                                   ]),  # noqa
            (data_prep, surfstat, [('path_to_matscript', 'path_to_matscript')
                                   ]),  # noqa
            (data_prep, surfstat, [('output_directory', 'output_directory')
                                   ]),  # noqa
            (data_prep, surfstat, [('freesurfer_home', 'freesurfer_home')
                                   ]),  # noqa
            (self.input_node, jsondict, [('glm_type', 'glm_type')]),  # noqa
            (self.input_node, jsondict, [('design_matrix', 'design_matrix')
                                         ]),  # noqa
            (self.input_node, jsondict, [('str_format', 'str_format')
                                         ]),  # noqa
            (self.input_node, jsondict, [('contrast', 'contrast')]),  # noqa
            (self.input_node, jsondict, [('group_label', 'group_label')
                                         ]),  # noqa
            (self.input_node, jsondict, [
                ('full_width_at_half_maximum', 'full_width_at_half_maximum')
            ]),  # noqa
            (self.input_node, jsondict, [('threshold_uncorrected_pvalue',
                                          'threshold_uncorrected_pvalue')
                                         ]),  # noqa
            (self.input_node, jsondict, [
                ('threshold_corrected_pvalue', 'threshold_corrected_pvalue')
            ]),  # noqa
            (self.input_node, jsondict, [('cluster_threshold',
                                          'cluster_threshold')]),  # noqa
            # sink: path from data_prep, payload dict from jsondict
            (data_prep, jsonsink, [('out_json', 'out_file')]),  # noqa
            (jsondict, jsonsink, [('json_dict', 'in_dict')]),  # noqa
        ])
Example #7
0
    def __init__(self, parent, dir_dic, bids):
        """Build the co-registration workflow.

        Creates an MNI-template BET node plus ANTs Registration/ApplyTransforms
        node pairs for post-op CT -> T1w, pre-op CT -> T1w, and T1w -> MNI,
        registers each in ``self.interfaces`` (append order drives the UI
        button mapping via ``btn_string``), sets up Data/JSON sinks, and wires
        the registration outputs to the corresponding transform nodes in a
        nipype ``Workflow`` named ``co_registration``.

        Args:
            parent: parent widget/controller, forwarded to the base class.
            dir_dic: dict of directory paths; ``'data_dir'`` is used for the
                sinks and ``'temp_dir'`` as the workflow base directory.
            bids: BIDS dataset handle, forwarded to the base class.
        """
        super().__init__(parent, dir_dic, bids)

        # Create interfaces ============================================================================================
        # BET
        MNI_BET = Node(BET(), name="MNI_BET")
        MNI_BET.btn_string = 'MNI Template Brain Extraction'
        self.interfaces.append(MNI_BET)

        # Registration
        postopCT_T1_Reg = Node(Registration(), name="postopCT_T1_Reg")
        postopCT_T1_Reg.btn_string = 'post-op CT to T1w Registration'
        self.interfaces.append(postopCT_T1_Reg)

        preopCT_T1_Reg = Node(Registration(), name="preopCT_T1_Reg")
        preopCT_T1_Reg.btn_string = 'pre-op CT to T1w Registration'
        self.interfaces.append(preopCT_T1_Reg)

        T1_MNI_Reg = Node(Registration(), name="T1_MNI_Reg")
        T1_MNI_Reg.btn_string = 'T1w to MNI template Registration'
        self.interfaces.append(T1_MNI_Reg)

        # Transformations
        postopCT_T1_Tran = Node(ApplyTransforms(), name="postopCT_T1_Tran")
        postopCT_T1_Tran.btn_string = 'post-op CT to T1w Transformation'
        self.interfaces.append(postopCT_T1_Tran)

        preopCT_T1_Tran = Node(ApplyTransforms(), name="preopCT_T1_Tran")
        preopCT_T1_Tran.btn_string = 'pre-op CT to T1w Transformation'
        self.interfaces.append(preopCT_T1_Tran)

        T1_MNI_Tran = Node(ApplyTransforms(), name="T1_MNI_Tran")
        T1_MNI_Tran.btn_string = 'T1w to MNI template Transformation'
        self.interfaces.append(T1_MNI_Tran)

        # Data output (i.e. sink) ======================================================================================
        self.sink = Node(DataSink(), name="sink")
        self.sink.btn_string = 'data sink'
        self.sink.inputs.base_directory = self.dir_dic['data_dir']

        self.jsink = Node(JSONFileSink(), name="jsink")
        self.jsink.btn_string = 'json sink'
        self.jsink.inputs.base_directory = self.dir_dic['data_dir']

        # Initialize workflow ==========================================================================================
        self.wf = Workflow(name='co_registration')

        # Brain extracted MNI template to antsRegistration
        # MI[mni_t1_brain.nii.gz,t1_nonGdE_brain_N4bfc_masked.nii.gz,1,32,Regular,0.25]
        # MI[fixedImage,movingImage,metricWeight,numberOfBins,<samplingStrategy={None,Regular,Random}>,<samplingPercentage=[0,1]>]
        self.wf.connect([(self.return_interface("MNI_BET"),
                          self.return_interface("T1_MNI_Reg"),
                          [("out_file", "fixed_image")])])

        self.wf.connect([(self.return_interface("MNI_BET"),
                          self.return_interface("T1_MNI_Tran"),
                          [("out_file", "reference_image")])])

        # T1 -> MNI Reg to Tran
        self.wf.connect([(self.return_interface("T1_MNI_Reg"),
                          self.return_interface("T1_MNI_Tran"),
                          [("composite_transform", "transforms")])])

        # postop CT -> T1 Reg to Tran
        self.wf.connect([(self.return_interface("postopCT_T1_Reg"),
                          self.return_interface("postopCT_T1_Tran"),
                          [("composite_transform", "transforms")])])

        # preop CT -> T1 Reg to Tran
        self.wf.connect([(self.return_interface("preopCT_T1_Reg"),
                          self.return_interface("preopCT_T1_Tran"),
                          [("composite_transform", "transforms")])])

        # BaseInterface generates a dict mapping button strings to the workflow nodes
        self.wf.base_dir = self.dir_dic['temp_dir']

        # Render the workflow graph and keep the path of the detailed variant
        # for display in the UI.
        graph_file = self.wf.write_graph("co_registration", graph2use='flat')
        self.graph_file = graph_file.replace("co_registration.png",
                                             "co_registration_detailed.png")

        self.init_settings()
        self.init_ui()