Code example #1
def _apply_perslice_warp(apply_to_file,
                         warp_files,
                         voxel_size_x,
                         voxel_size_y,
                         write_dir=None,
                         caching=False,
                         verbose=True,
                         terminal_output='allatonce',
                         environ=None):
    """Apply one precomputed nonlinear warp per z-slice of an image.

    The image is split along z with FSL Slice, each slice is warped with
    AFNI NwarpApply using the matching entry of `warp_files` (a ``None``
    entry passes the slice through unchanged), obliquity is fixed per
    slice, and the slices are merged back with FSL Merge.

    Parameters
    ----------
    apply_to_file : str
        Path to the image whose slices will be warped.
    warp_files : list of (str or None)
        One warp file per z-slice of `apply_to_file`.
    voxel_size_x, voxel_size_y : float
        Unused in this function; presumably kept for interface symmetry
        with `_per_slice_qwarp` — TODO confirm.
    write_dir : str, optional
        Output directory. Defaults to the directory of `apply_to_file`.
    caching : bool, optional
        If True, run the interfaces through nipype Memory caching and
        keep intermediate files; otherwise intermediates are removed.
    verbose : bool, optional
        Forwarded to `fix_obliquity`.
    terminal_output : str, optional
        nipype terminal output mode for the wrapped interfaces.
    environ : dict, optional
        Environment for the AFNI commands; defaults to allowing output
        overwrite.

    Returns
    -------
    str
        Path to the merged, warped image.

    Raises
    ------
    ValueError
        If ``len(warp_files)`` does not match the number of z-slices.
    """
    if write_dir is None:
        # BUG FIX: the original assignment ended with a trailing comma,
        # which made write_dir a 1-tuple and broke os.path.join() below.
        write_dir = os.path.dirname(apply_to_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(fsl.Slice)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(fsl.Merge)
        for step in [resample, slicer, warp_apply, qwarp, merge]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        # NOTE(review): resample and qwarp are created but unused in this
        # function; kept for symmetry with the caching branch.
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = fsl.Slice(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = fsl.Merge(terminal_output=terminal_output).run

    apply_to_img = nibabel.load(apply_to_file)
    n_slices = apply_to_img.header.get_data_shape()[2]

    # One warp per slice is required; fail fast on a mismatch.
    if len(warp_files) != n_slices:
        raise ValueError('number of warp files {0} does not match number of '
                         'slices {1}'.format(len(warp_files), n_slices))

    output_files = []
    per_slice_dir = os.path.join(write_dir, 'per_slice')
    if not os.path.isdir(per_slice_dir):
        os.makedirs(per_slice_dir)

    # Split the image into individual z-slices.
    sliced_apply_to_files = []
    out_slicer = slicer(in_file=apply_to_file,
                        out_base_name=fname_presuffix(apply_to_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    # Workaround: reconstruct slicer output filenames instead of relying
    # on nipype output globbing (see _per_slice_qwarp for context).
    sliced_apply_to_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    # Warp each slice; slices with no warp file are passed through.
    warped_apply_to_slices = []
    sliced_apply_to_files_to_remove = []
    for (sliced_apply_to_file, warp_file) in zip(sliced_apply_to_files,
                                                 warp_files):
        if warp_file is None:
            warped_apply_to_slices.append(sliced_apply_to_file)
        else:
            sliced_apply_to_files_to_remove.append(sliced_apply_to_file)
            out_warp_apply = warp_apply(in_file=sliced_apply_to_file,
                                        master=sliced_apply_to_file,
                                        warp=warp_file,
                                        out_file=fname_presuffix(
                                            sliced_apply_to_file,
                                            suffix='_qwarped'),
                                        environ=environ)
            warped_apply_to_slices.append(out_warp_apply.outputs.out_file)

    # Fix the obliquity of each warped slice against its source slice.
    oblique_warped_apply_to_slices = []
    for (sliced_apply_to_file,
         warped_apply_to_slice) in zip(sliced_apply_to_files,
                                       warped_apply_to_slices):
        oblique_slice = fix_obliquity(warped_apply_to_slice,
                                      sliced_apply_to_file,
                                      verbose=verbose,
                                      caching=caching,
                                      caching_dir=per_slice_dir,
                                      environ=environ)
        oblique_warped_apply_to_slices.append(oblique_slice)

    # Merge all slices back into a single volume along z.
    out_merge_apply_to = merge(in_files=oblique_warped_apply_to_slices,
                               dimension='z',
                               merged_file=fname_presuffix(apply_to_file,
                                                           suffix='_perslice',
                                                           newpath=write_dir),
                               environ=environ)

    # Fix the obliquity of the merged volume against the original image.
    merged_apply_to_file = fix_obliquity(
        out_merge_apply_to.outputs.merged_file,
        apply_to_file,
        verbose=verbose,
        caching=caching,
        caching_dir=per_slice_dir,
        environ=environ)

    # Collect intermediates for cleanup.
    output_files.extend(sliced_apply_to_files_to_remove +
                        oblique_warped_apply_to_slices)

    if not caching:
        for out_file in output_files:
            os.remove(out_file)

    return merged_apply_to_file
Code example #2
File: __init__.py  Project: soichih/C-PAC
def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1):
    """Build a nipype workflow that computes a temporal-variance mask.

    The workflow (named ``tcompcor``) detrends the functional image,
    computes the per-voxel temporal standard deviation within the mask,
    squares it into a variance map, thresholds it (optionally slice by
    slice), binarizes, and outputs the resulting mask.

    Parameters
    ----------
    threshold : float or str
        Either a plain number (absolute variance threshold), or a string
        like ``"1.5 SD"`` / ``"2 PCT"`` selecting a threshold relative
        to the variance distribution.
    by_slice : bool, optional
        If True, compute the threshold independently for each z-slice.
    erosion : bool, optional
        Unused in this implementation; kept for interface compatibility.
    degree : int, optional
        Polynomial order passed to AFNI Detrend (``-polort``).

    Returns
    -------
    nipype.pipeline.engine.Workflow
        Workflow with ``inputspec`` (functional_file_path,
        mask_file_path) and ``outputspec`` (mask) nodes.

    Raises
    ------
    ValueError
        If the threshold cannot be converted to a float, is negative,
        or a percentile threshold is >= 100.
    """
    threshold_method = "VAR"
    # BUG FIX: initialize threshold_value so that a numeric `threshold`
    # (or an unmatched string) reaches the float() conversion below
    # instead of raising NameError inside the error path.
    threshold_value = threshold

    if isinstance(threshold, str):
        regex_match = {
            "SD": r"([0-9]+(\.[0-9]+)?)\s*SD",
            "PCT": r"([0-9]+(\.[0-9]+)?)\s*PCT",
        }

        for method, regex in regex_match.items():
            matched = re.match(regex, threshold)
            if matched:
                threshold_method = method
                threshold_value = matched.groups()[0]

    try:
        threshold_value = float(threshold_value)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures should
        # produce this error message.
        raise ValueError(
            "Error converting threshold value {0} from {1} to a "
            "floating point number. The threshold value can "
            "contain SD or PCT for selecting a threshold based on "
            "the variance distribution, otherwise it should be a "
            "floating point number.".format(threshold_value, threshold))

    if threshold_value < 0:
        raise ValueError(
            "Threshold value should be positive, instead of {0}.".format(
                threshold_value))

    # BUG FIX: compare strings with ==, not `is` (identity comparison on
    # string literals is implementation-dependent).
    if threshold_method == "PCT" and threshold_value >= 100.0:
        raise ValueError(
            "Percentile should be less than 100, received {0}.".format(
                threshold_value))

    threshold = threshold_value

    wf = pe.Workflow(name='tcompcor')

    input_node = pe.Node(util.IdentityInterface(
        fields=['functional_file_path', 'mask_file_path']),
                         name='inputspec')
    output_node = pe.Node(util.IdentityInterface(fields=['mask']),
                          name='outputspec')

    # C-PAC default performs linear regression while nipype performs
    # quadratic regression, hence the explicit -polort argument.
    detrend = pe.Node(afni.Detrend(args='-polort {0}'.format(degree),
                                   outputtype='NIFTI'),
                      name='detrend')
    wf.connect(input_node, 'functional_file_path', detrend, 'in_file')

    # Temporal standard deviation within the mask.
    std = pe.Node(afni.TStat(args='-nzstdev', outputtype='NIFTI'), name='std')
    wf.connect(input_node, 'mask_file_path', std, 'mask')
    wf.connect(detrend, 'out_file', std, 'in_file')

    # Square the std map to get a variance map.
    var = pe.Node(afni.Calc(expr='a*a', outputtype='NIFTI'), name='var')
    wf.connect(std, 'out_file', var, 'in_file_a')

    if by_slice:
        # Split both the variance map and the mask into z-slices so the
        # threshold can be computed per slice.
        slices = pe.Node(fsl.Slice(), name='slicer')
        wf.connect(var, 'out_file', slices, 'in_file')

        mask_slices = pe.Node(fsl.Slice(), name='mask_slicer')
        wf.connect(input_node, 'mask_file_path', mask_slices, 'in_file')

        mapper = pe.MapNode(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper',
            iterfield=['out_file', 'mask_file'])
        wf.connect(slices, 'out_files', mapper, 'out_file')
        wf.connect(mask_slices, 'out_files', mapper, 'mask_file')

    else:
        # Wrap the single volume in 1-element lists so the downstream
        # MapNodes iterate uniformly in both modes.
        mapper_list = pe.Node(util.Merge(1), name='slice_mapper_list')
        wf.connect(var, 'out_file', mapper_list, 'in1')

        mask_mapper_list = pe.Node(util.Merge(1),
                                   name='slice_mask_mapper_list')
        wf.connect(input_node, 'mask_file_path', mask_mapper_list, 'in1')

        mapper = pe.Node(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper')
        wf.connect(mapper_list, 'out', mapper, 'out_file')
        wf.connect(mask_mapper_list, 'out', mapper, 'mask_file')

    # Select the threshold-computation function for the chosen method.
    if threshold_method == "PCT":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_pct'],
            output_names=['threshold'],
            function=compute_pct_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_pct = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    elif threshold_method == "SD":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_sd'],
            output_names=['threshold'],
            function=compute_sd_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_sd = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    else:
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold'],
            output_names=['threshold'],
            function=compute_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    # Threshold and binarize each (slice of the) variance map.
    threshold_mask = pe.MapNode(interface=fsl.maths.Threshold(),
                                name='threshold',
                                iterfield=['in_file', 'thresh'])
    threshold_mask.inputs.args = '-bin'
    wf.connect(mapper, 'out_file', threshold_mask, 'in_file')
    wf.connect(threshold_node, 'threshold', threshold_mask, 'thresh')

    # Re-assemble the per-slice masks into one volume along z.
    merge_slice_masks = pe.Node(interface=fsl.Merge(),
                                name='merge_slice_masks')
    merge_slice_masks.inputs.dimension = 'z'
    wf.connect(threshold_mask, 'out_file', merge_slice_masks, 'in_files')

    wf.connect(merge_slice_masks, 'merged_file', output_node, 'mask')

    return wf
Code example #3
def _per_slice_qwarp(to_qwarp_file,
                     reference_file,
                     voxel_size_x,
                     voxel_size_y,
                     apply_to_file=None,
                     write_dir=None,
                     caching=False,
                     verbose=True,
                     terminal_output='allatonce',
                     environ=None):
    """Nonlinearly register an image to a reference, slice by slice.

    Both images are split along z, each slice pair is upsampled in-plane
    to (`voxel_size_x`, `voxel_size_y`), registered with AFNI 3dQwarp
    (in-plane only; x displacement and most affine parameters are
    frozen), resampled back, obliquity-fixed, and merged. Optionally the
    per-slice warps are also applied to `apply_to_file`.

    Parameters
    ----------
    to_qwarp_file : str
        Path to the image to register (e.g. a mean functional).
    reference_file : str
        Path to the registration target (e.g. an anatomical).
    voxel_size_x, voxel_size_y : float
        In-plane voxel size used for the intermediate resampling.
    apply_to_file : str, optional
        If given, the computed per-slice warps are applied to this image
        as well.
    write_dir : str, optional
        Output directory. Defaults to the directory of `to_qwarp_file`.
    caching : bool, optional
        If True, run the interfaces through nipype Memory caching and
        keep intermediate files; otherwise intermediates are removed.
    verbose : bool, optional
        Forwarded to 3dQwarp and `fix_obliquity`.
    terminal_output : str, optional
        nipype terminal output mode for the wrapped interfaces.
    environ : dict, optional
        Environment for the AFNI commands; defaults to allowing output
        overwrite.

    Returns
    -------
    tuple of (str, list, str or None)
        The registered merged image, the per-slice warp files (``None``
        entries for empty slices), and the warped `apply_to_file` (or
        ``None`` if `apply_to_file` was not given).
    """
    if write_dir is None:
        # BUG FIX: the original assignment ended with a trailing comma,
        # which made write_dir a 1-tuple and broke os.path.join() below.
        write_dir = os.path.dirname(to_qwarp_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(fsl.Slice)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(fsl.Merge)
        for step in [resample, slicer, warp_apply, qwarp, merge]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = fsl.Slice(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = fsl.Merge(terminal_output=terminal_output).run

    # Slice the anatomical (reference) image along z.
    reference_img = nibabel.load(reference_file)
    per_slice_dir = os.path.join(write_dir, 'per_slice')
    if not os.path.isdir(per_slice_dir):
        os.makedirs(per_slice_dir)

    out_slicer = slicer(in_file=reference_file,
                        out_base_name=fname_presuffix(reference_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    # XXX: workaround for nipype globbing to find slicer outputs
    # Use out_slicer.outputs.out_files once fixed
    sliced_reference_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    # Slice the mean functional along z.
    out_slicer = slicer(in_file=to_qwarp_file,
                        out_base_name=fname_presuffix(to_qwarp_file,
                                                      newpath=per_slice_dir,
                                                      use_ext=False))
    sliced_to_qwarp_files = _get_fsl_slice_output_files(
        out_slicer.inputs['out_base_name'], out_slicer.inputs['output_type'])

    # Below, slices without signal (e.g. the rostral end of some
    # anatomicals) are detected and skipped during registration.

    # The inverse warp frequently fails; in-plane resampling helps it
    # work better. XXX why specifically .1 in voxel_size ?
    voxel_size_z = reference_img.header.get_zooms()[2]
    resampled_sliced_reference_files = []
    for sliced_reference_file in sliced_reference_files:
        out_resample = resample(in_file=sliced_reference_file,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                out_file=fname_presuffix(sliced_reference_file,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_sliced_reference_files.append(out_resample.outputs.out_file)

    resampled_sliced_to_qwarp_files = []
    for sliced_to_qwarp_file in sliced_to_qwarp_files:
        out_resample = resample(in_file=sliced_to_qwarp_file,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                out_file=fname_presuffix(sliced_to_qwarp_file,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_sliced_to_qwarp_files.append(out_resample.outputs.out_file)

    # Single-slice nonlinear functional-to-anatomical registration.
    warped_slices = []
    warp_files = []
    output_files = []
    resampled_sliced_to_qwarp_files_to_remove = []
    for (resampled_sliced_to_qwarp_file,
         resampled_sliced_reference_file) in zip(
             resampled_sliced_to_qwarp_files,
             resampled_sliced_reference_files):
        warped_slice = fname_presuffix(resampled_sliced_to_qwarp_file,
                                       suffix='_qwarped')
        # NOTE(review): get_data() is deprecated in recent nibabel in
        # favor of get_fdata(); kept for compatibility with the rest of
        # the codebase — confirm nibabel version before changing.
        to_qwarp_data = nibabel.load(resampled_sliced_to_qwarp_file).get_data()
        ref_data = nibabel.load(resampled_sliced_reference_file).get_data()

        if to_qwarp_data.max() == 0 or ref_data.max() == 0:
            # Slice with no signal: pass it through unwarped.
            warped_slices.append(resampled_sliced_to_qwarp_file)
            warp_files.append(None)
        else:
            resampled_sliced_to_qwarp_files_to_remove.append(
                resampled_sliced_to_qwarp_file)
            # In-plane registration only: freeze x displacement and all
            # affine parameters except in-plane translation/scale/shear.
            out_qwarp = qwarp(
                in_file=resampled_sliced_to_qwarp_file,
                base_file=resampled_sliced_reference_file,
                noneg=True,
                blur=[0],
                nmi=True,
                noXdis=True,
                allineate=True,
                allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
                '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
                '-parfix 7 0 -parfix 9 0 '
                '-parfix 10 0 -parfix 12 0',
                out_file=warped_slice,
                environ=environ,
                verb=verbose)
            # XXX fix qwarp bug : out_qwarp.outputs.warped_source extension is
            # +tlrc.HEAD if base_file and in_file are of different extensions
            warped_slices.append(warped_slice)
            warp_files.append(out_qwarp.outputs.source_warp)
            # Additional files generated by the allineate option must be
            # tracked for cleanup.
            output_files.extend([
                fname_presuffix(out_qwarp.outputs.warped_source,
                                suffix='_Allin.nii',
                                use_ext=False),
                fname_presuffix(out_qwarp.outputs.warped_source,
                                suffix='_Allin.aff12.1D',
                                use_ext=False)
            ])

    # Resample the warped slices back to the initial resolution.
    voxel_size = nibabel.load(to_qwarp_file).header.get_zooms()[:3]
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                out_file=fname_presuffix(warped_slice,
                                                         suffix='_resampled'),
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # Fix the obliquity of each slice against its reference slice.
    oblique_resampled_warped_slices = []
    for (sliced_reference_file,
         resampled_warped_slice) in zip(sliced_reference_files,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_reference_file,
                                      verbose=verbose,
                                      caching=caching,
                                      caching_dir=per_slice_dir,
                                      environ=environ)
        oblique_resampled_warped_slices.append(oblique_slice)

    # Merge the registered slices back along z.
    out_merge_func = merge(in_files=oblique_resampled_warped_slices,
                           dimension='z',
                           merged_file=fname_presuffix(to_qwarp_file,
                                                       suffix='_perslice',
                                                       newpath=write_dir),
                           environ=environ)

    # Fix the obliquity of the merged volume against the reference.
    oblique_merged = fix_obliquity(out_merge_func.outputs.merged_file,
                                   reference_file,
                                   verbose=verbose,
                                   caching=caching,
                                   caching_dir=per_slice_dir,
                                   environ=environ)

    # Collect the intermediates for cleanup.
    output_files.extend(sliced_reference_files + sliced_to_qwarp_files +
                        resampled_sliced_reference_files +
                        resampled_sliced_to_qwarp_files_to_remove +
                        warped_slices + oblique_resampled_warped_slices)

    # Apply the precomputed warps, slice by slice, to apply_to_file.
    if apply_to_file is not None:
        # Slice the functional image along z.
        out_slicer = slicer(in_file=apply_to_file,
                            out_base_name=fname_presuffix(
                                apply_to_file,
                                newpath=per_slice_dir,
                                use_ext=False))
        sliced_apply_to_files = _get_fsl_slice_output_files(
            out_slicer.inputs['out_base_name'],
            out_slicer.inputs['output_type'])
        warped_apply_to_slices = []
        sliced_apply_to_files_to_remove = []
        for (sliced_apply_to_file, warp_file) in zip(sliced_apply_to_files,
                                                     warp_files):
            if warp_file is None:
                # No warp was computed for this slice; pass it through.
                warped_apply_to_slices.append(sliced_apply_to_file)
            else:
                sliced_apply_to_files_to_remove.append(sliced_apply_to_file)
                out_warp_apply = warp_apply(in_file=sliced_apply_to_file,
                                            master=sliced_apply_to_file,
                                            warp=warp_file,
                                            out_file=fname_presuffix(
                                                sliced_apply_to_file,
                                                suffix='_qwarped'),
                                            environ=environ)
                warped_apply_to_slices.append(out_warp_apply.outputs.out_file)

        # Fix the obliquity of each warped slice.
        oblique_warped_apply_to_slices = []
        for (sliced_apply_to_file,
             warped_apply_to_slice) in zip(sliced_apply_to_files,
                                           warped_apply_to_slices):
            oblique_slice = fix_obliquity(warped_apply_to_slice,
                                          sliced_apply_to_file,
                                          verbose=verbose,
                                          caching=caching,
                                          caching_dir=per_slice_dir,
                                          environ=environ)
            oblique_warped_apply_to_slices.append(oblique_slice)

        # Merge all warped slices back along z.
        out_merge_apply_to = merge(in_files=oblique_warped_apply_to_slices,
                                   dimension='z',
                                   merged_file=fname_presuffix(
                                       apply_to_file,
                                       suffix='_perslice',
                                       newpath=write_dir),
                                   environ=environ)

        # Fix the obliquity of the merged volume against the original.
        merged_apply_to_file = fix_obliquity(
            out_merge_apply_to.outputs.merged_file,
            apply_to_file,
            verbose=verbose,
            caching=caching,
            caching_dir=per_slice_dir,
            environ=environ)

        # Track the apply_to intermediates for cleanup.
        output_files.extend(sliced_apply_to_files_to_remove +
                            oblique_warped_apply_to_slices)
    else:
        merged_apply_to_file = None

    if not caching:
        for out_file in output_files:
            os.remove(out_file)

    return (oblique_merged, warp_files, merged_apply_to_file)