Example #1
    def tensor_pipeline(self, **kwargs):  # @UnusedVariable
        """
        Fits the apparent diffusion tensor (DT) to each voxel of the image
        """
        pipeline = self.create_pipeline(
            name='tensor',
            inputs=[
                DatasetSpec('bias_correct', nifti_gz_format),
                DatasetSpec('grad_dirs', fsl_bvecs_format),
                DatasetSpec('bvalues', fsl_bvals_format),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[DatasetSpec('tensor', nifti_gz_format)],
            desc=("Estimates the apparent diffusion tensor in each "
                  "voxel"),
            version=1,
            citations=[],
            **kwargs)
        # Create tensor fit node
        dwi2tensor = pipeline.create_node(FitTensor(), name='dwi2tensor')
        dwi2tensor.inputs.out_file = 'dti.nii.gz'
        # Gradient merge node
        fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', dwi2tensor, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('bias_correct', dwi2tensor, 'in_file')
        pipeline.connect_input('brain_mask', dwi2tensor, 'in_mask')
        # Connect to outputs
        pipeline.connect_output('tensor', dwi2tensor, 'out_file')
        # Check inputs/output are connected
        return pipeline
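
For context, here is a minimal standalone sketch of driving the FitTensor interface that the dwi2tensor node above wraps, assuming it is nipype's MRtrix3 wrapper (as the auto-generated tests further down suggest). The file paths are hypothetical placeholders; empty files are touched only so that nipype's exists=True trait validation accepts them when the inputs are assigned.

from pathlib import Path
from nipype.interfaces.mrtrix3 import FitTensor

# Dummy placeholder files so the exists=True input traits accept the paths.
for name in ('dwi_bias_corrected.nii.gz', 'dwi.bvec', 'dwi.bval',
             'brain_mask.nii.gz'):
    Path(name).touch()

fit = FitTensor()
fit.inputs.in_file = 'dwi_bias_corrected.nii.gz'  # bias-corrected DWI series
fit.inputs.grad_fsl = ('dwi.bvec', 'dwi.bval')    # FSL-style gradient table
fit.inputs.in_mask = 'brain_mask.nii.gz'          # restrict the fit to the brain
fit.inputs.out_file = 'dti.nii.gz'                # fitted tensor image
print(fit.cmdline)  # roughly: dwi2tensor -fslgrad dwi.bvec dwi.bval -mask ...
# fit.run()  # would invoke MRtrix3's dwi2tensor on real data
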
Example #2
File: dmri.py Project: amrka/banana
    def tensor_pipeline(self, **name_maps):  # @UnusedVariable
        """
        Fits the apparent diffusion tensor (DT) to each voxel of the image
        """

#             inputs=[FilesetSpec('bias_correct', nifti_gz_format),
#                     FilesetSpec('grad_dirs', fsl_bvecs_format),
#                     FilesetSpec('bvalues', fsl_bvals_format),
#                     FilesetSpec('brain_mask', nifti_gz_format)],
#             outputs=[FilesetSpec('tensor', nifti_gz_format)],

        pipeline = self.new_pipeline(
            name='tensor',
            desc=("Estimates the apparent diffusion tensor in each "
                  "voxel"),
            references=[],
            name_maps=name_maps)
        # Create tensor fit node
        dwi2tensor = pipeline.add(
            'dwi2tensor',
            FitTensor())
        dwi2tensor.inputs.out_file = 'dti.nii.gz'
        # Gradient merge node
        fsl_grads = pipeline.add("fsl_grads", MergeTuple(2))
        # Connect nodes
        pipeline.connect(fsl_grads, 'out', dwi2tensor, 'grad_fsl')
        # Connect to inputs
        pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
        pipeline.connect_input('bvalues', fsl_grads, 'in2')
        pipeline.connect_input('bias_correct', dwi2tensor, 'in_file')
        pipeline.connect_input('brain_mask', dwi2tensor, 'in_mask')
        # Connect to outputs
        pipeline.connect_output('tensor', dwi2tensor, 'out_file')
        # Check inputs/output are connected
        return pipeline
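
MergeTuple appears to be a utility from the surrounding project rather than a standard nipype interface: it packs the bvec and bval paths into the single (bvecs, bvals) tuple that FitTensor's grad_fsl input (-fslgrad %s %s) expects. A hedged stand-in using nipype's generic Function interface is sketched below; the node and function names are illustrative only.

from nipype import Node
from nipype.interfaces.utility import Function

def pack_grads(in1, in2):
    """Return the bvec and bval paths as a single (bvecs, bvals) tuple."""
    return (in1, in2)

fsl_grads = Node(Function(input_names=['in1', 'in2'],
                          output_names=['out'],
                          function=pack_grads),
                 name='fsl_grads')
# e.g. workflow.connect(fsl_grads, 'out', dwi2tensor, 'grad_fsl')
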
Example #3
def test_FitTensor_outputs():
    output_map = dict(out_file=dict(), )
    outputs = FitTensor.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
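
The test above is written in the nose yield-generator style, which modern pytest no longer collects. A hedged pytest-style rewrite of the same check might look like this (the test name is illustrative):

from nipype.interfaces.mrtrix3 import FitTensor

def test_fittensor_outputs_pytest():
    output_map = dict(out_file=dict())
    outputs = FitTensor.output_spec()
    assert 'out_file' in outputs.traits()  # the output trait must exist
    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            assert getattr(outputs.traits()[key], metakey) == value
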
Example #4
    def tensor_pipeline(self, **name_maps):
        """
        Fits the apparent diffusion tensor (DT) to each voxel of the image
        """

        pipeline = self.new_pipeline(
            name='tensor',
            desc=("Estimates the apparent diffusion tensor in each "
                  "voxel"),
            citations=[],
            name_maps=name_maps)

        # Create tensor fit node
        pipeline.add(
            'dwi2tensor',
            FitTensor(
                out_file='dti.nii.gz'),
            inputs={
                'grad_fsl': self.fsl_grads(pipeline),
                'in_file': (self.series_preproc_spec_name, nifti_gz_format),
                'in_mask': (self.brain_mask_spec_name, nifti_gz_format)},
            outputs={
                'tensor': ('out_file', nifti_gz_format)},
            requirements=[mrtrix_req.v('3.0rc3')])

        return pipeline
Example #5
def test_FitTensor_outputs():
    output_map = dict(out_file=dict())
    outputs = FitTensor.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #6
def test_FitTensor_inputs():
    input_map = dict(
        args=dict(argstr='%s'),
        bval_scale=dict(argstr='-bvalue_scaling %s'),
        environ=dict(nohash=True, usedefault=True),
        grad_file=dict(argstr='-grad %s'),
        grad_fsl=dict(argstr='-fslgrad %s %s'),
        ignore_exception=dict(nohash=True, usedefault=True),
        in_bval=dict(),
        in_bvec=dict(argstr='-fslgrad %s %s'),
        in_file=dict(argstr='%s', mandatory=True, position=-2),
        in_mask=dict(argstr='-mask %s'),
        method=dict(argstr='-method %s'),
        nthreads=dict(argstr='-nthreads %d', nohash=True),
        out_file=dict(argstr='%s', mandatory=True, position=-1,
                      usedefault=True),
        reg_term=dict(argstr='-regularisation %f'),
        terminal_output=dict(nohash=True),
    )
    inputs = FitTensor.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #7
def test_FitTensor_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        bval_scale=dict(argstr='-bvalue_scaling %s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        grad_file=dict(argstr='-grad %s', ),
        grad_fsl=dict(argstr='-fslgrad %s %s', ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_bval=dict(),
        in_bvec=dict(argstr='-fslgrad %s %s', ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=-2,
        ),
        in_mask=dict(argstr='-mask %s', ),
        method=dict(argstr='-method %s', ),
        nthreads=dict(
            argstr='-nthreads %d',
            nohash=True,
        ),
        out_file=dict(
            argstr='%s',
            mandatory=True,
            position=-1,
            usedefault=True,
        ),
        reg_term=dict(argstr='-regularisation %f', ),
        terminal_output=dict(nohash=True, ),
    )
    inputs = FitTensor.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
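
The argstr values asserted above correspond directly to dwi2tensor command-line flags. As a hedged sketch, the same mapping can be regenerated by inspecting the installed interface's input spec, since trait metadata is attribute-accessible (which is what the getattr calls in these tests rely on):

from nipype.interfaces.mrtrix3 import FitTensor

inputs = FitTensor.input_spec()
for name, trait in sorted(inputs.traits().items()):
    argstr = getattr(trait, 'argstr', None)
    if argstr:                         # skip traits with no command-line flag
        print('{:<16s} {}'.format(name, argstr))
# e.g. grad_fsl         -fslgrad %s %s
#      in_mask          -mask %s
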