Example #1
from nipype.testing import assert_equal
from nipype.interfaces.spm import NewSegment


def test_NewSegment_inputs():
    input_map = dict(
        affine_regularization=dict(field='warp.affreg', ),
        channel_files=dict(
            copyfile=False,
            field='channel',
            mandatory=True,
        ),
        channel_info=dict(field='channel', ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        matlab_cmd=dict(),
        mfile=dict(usedefault=True, ),
        paths=dict(),
        sampling_distance=dict(field='warp.samp', ),
        tissues=dict(field='tissue', ),
        use_mcr=dict(),
        use_v8struct=dict(
            min_ver='8',
            usedefault=True,
        ),
        warping_regularization=dict(field='warp.reg', ),
        write_deformation_fields=dict(field='warp.write', ),
    )
    inputs = NewSegment.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
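
The test above checks trait metadata with the old nose-style "yield assert_equal" pattern. A minimal pytest-style sketch of the same check, assuming nipype is importable and spelling out only two representative keys (the test name is hypothetical):

from nipype.interfaces.spm import NewSegment


def test_newsegment_inputs_pytest_sketch():
    # Same metadata expectations as above, verified with plain asserts
    expected = {
        'channel_files': dict(copyfile=False, field='channel', mandatory=True),
        'affine_regularization': dict(field='warp.affreg'),
    }
    inputs = NewSegment.input_spec()
    for key, metadata in expected.items():
        for metakey, value in metadata.items():
            assert getattr(inputs.traits()[key], metakey) == value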
Example #2
def test_NewSegment_inputs():
    input_map = dict(
        affine_regularization=dict(field='warp.affreg', ),
        channel_files=dict(
            copyfile=False,
            field='channel',
            mandatory=True,
        ),
        channel_info=dict(field='channel', ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        matlab_cmd=dict(),
        mfile=dict(usedefault=True, ),
        paths=dict(),
        sampling_distance=dict(field='warp.samp', ),
        tissues=dict(field='tissue', ),
        use_mcr=dict(),
        use_v8struct=dict(
            min_ver='8',
            usedefault=True,
        ),
        warping_regularization=dict(field='warp.reg', ),
        write_deformation_fields=dict(field='warp.write', ),
    )
    inputs = NewSegment.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #3
def test_NewSegment_outputs():
    output_map = dict(
        bias_corrected_images=dict(),
        bias_field_images=dict(),
        dartel_input_images=dict(),
        forward_deformation_field=dict(),
        inverse_deformation_field=dict(),
        modulated_class_images=dict(),
        native_class_images=dict(),
        normalized_class_images=dict(),
        transformation_mat=dict(),
    )
    outputs = NewSegment.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #4
from nipype.testing import assert_equal
from nipype.interfaces.spm import NewSegment


def test_NewSegment_outputs():
    output_map = dict(
        bias_corrected_images=dict(),
        bias_field_images=dict(),
        dartel_input_images=dict(),
        forward_deformation_field=dict(),
        inverse_deformation_field=dict(),
        modulated_class_images=dict(),
        native_class_images=dict(),
        normalized_class_images=dict(),
        transformation_mat=dict(),
    )
    outputs = NewSegment.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
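
The output names asserted in Examples #3 and #4 can also be enumerated straight from the interface spec. A small sketch, assuming nipype is importable:

from nipype.interfaces.spm import NewSegment

# List NewSegment's output trait names (e.g. 'native_class_images',
# 'forward_deformation_field', 'transformation_mat', ...).
outputs = NewSegment.output_spec()
print(sorted(outputs.get().keys()))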
Example #5
    def segmentation_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8

        pipeline = self.create_pipeline(
            name='ute1_segmentation',
            inputs=[DatasetSpec('ute1_registered', nifti_format)],
            outputs=[
                DatasetSpec('air_mask', nifti_gz_format),
                DatasetSpec('bones_mask', nifti_gz_format)
            ],
            desc="Segmentation of the first echo UTE image",
            version=1,
            citations=(spm_cite, matlab_cite),
            **kwargs)

        segmentation = pipeline.create_node(
            NewSegment(),
            name='ute1_registered_segmentation',
            requirements=[matlab2015_req, spm12_req],
            wall_time=480)
        pipeline.connect_input('ute1_registered', segmentation,
                               'channel_files')
        segmentation.inputs.affine_regularization = 'none'
        # NewSegment expects each tissue as a tuple:
        #   ((tissue probability map file, 1-based volume index),
        #    number of Gaussians,
        #    (native space, DARTEL input) flags,
        #    (unmodulated, modulated) warped-output flags)
        tissue1 = ((self.tpm_path, 1), 1, (True, False), (False, False))
        tissue2 = ((self.tpm_path, 2), 1, (True, False), (False, False))
        tissue3 = ((self.tpm_path, 3), 2, (True, False), (False, False))
        tissue4 = ((self.tpm_path, 4), 3, (True, False), (False, False))
        tissue5 = ((self.tpm_path, 5), 4, (True, False), (False, False))
        tissue6 = ((self.tpm_path, 6), 3, (True, False), (False, False))
        segmentation.inputs.tissues = [
            tissue1, tissue2, tissue3, tissue4, tissue5, tissue6
        ]

        select_bones_pm = pipeline.create_node(
            Select(),
            name='select_bones_pm_from_SPM_new_segmentation',
            requirements=[],
            wall_time=5)
        pipeline.connect(segmentation, 'native_class_images', select_bones_pm,
                         'inlist')
        # Select is 0-based: index 3 picks the fourth native tissue class
        # (bone in the standard SPM TPM ordering).
        select_bones_pm.inputs.index = 3

        select_air_pm = pipeline.create_node(
            Select(),
            name='select_air_pm_from_SPM_new_segmentation',
            requirements=[],
            wall_time=5)

        pipeline.connect(segmentation, 'native_class_images', select_air_pm,
                         'inlist')
        # Index 5 picks the sixth native tissue class (air/background).
        select_air_pm.inputs.index = 5

        threshold_bones = pipeline.create_node(
            Threshold(),
            name='bones_probabilistic_map_thresholding',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(select_bones_pm, 'out', threshold_bones, 'in_file')
        threshold_bones.inputs.output_type = "NIFTI_GZ"
        threshold_bones.inputs.direction = 'below'
        threshold_bones.inputs.thresh = 0.2

        binarize_bones = pipeline.create_node(
            UnaryMaths(),
            name='bones_probabilistic_map_binarization',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(threshold_bones, 'out_file', binarize_bones,
                         'in_file')
        binarize_bones.inputs.output_type = "NIFTI_GZ"
        binarize_bones.inputs.operation = 'bin'

        threshold_air = pipeline.create_node(
            Threshold(),
            name='air_probabilistic_maps_thresholding',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(select_air_pm, 'out', threshold_air, 'in_file')
        threshold_air.inputs.output_type = "NIFTI_GZ"
        threshold_air.inputs.direction = 'below'
        threshold_air.inputs.thresh = 0.1

        binarize_air = pipeline.create_node(
            UnaryMaths(),
            name='air_probabilistic_map_binarization',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(threshold_air, 'out_file', binarize_air, 'in_file')
        binarize_air.inputs.output_type = "NIFTI_GZ"
        binarize_air.inputs.operation = 'bin'

        pipeline.connect_output('bones_mask', binarize_bones, 'out_file')
        pipeline.connect_output('air_mask', binarize_air, 'out_file')
        pipeline.assert_connected()

        return pipeline
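
Example #5 wires NewSegment into a pipeline framework; outside of it, the same segmentation settings can be applied directly to the nipype interface. A minimal standalone sketch, assuming nipype with MATLAB/SPM (or an MCR) available; the TPM and input file names are placeholders:

from nipype.interfaces.spm import NewSegment

tpm = 'TPM.nii'  # placeholder: SPM tissue probability map (4D, six volumes)

seg = NewSegment()
seg.inputs.channel_files = ['ute1_registered.nii']  # placeholder input image
seg.inputs.affine_regularization = 'none'
seg.inputs.tissues = [
    # ((tpm file, 1-based volume), n_gaussians, (native, dartel), (unmod, mod))
    ((tpm, 1), 1, (True, False), (False, False)),
    ((tpm, 2), 1, (True, False), (False, False)),
    ((tpm, 3), 2, (True, False), (False, False)),
    ((tpm, 4), 3, (True, False), (False, False)),
    ((tpm, 5), 4, (True, False), (False, False)),
    ((tpm, 6), 3, (True, False), (False, False)),
]
# result = seg.run()  # uncomment when MATLAB/SPM or an MCR is configured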