Example 1
def change_spacing_4D(img_in, new_spacing=1.25):
    """
    Note: only works properly if the affine is all zeros except for the diagonal and the offset (i.e. no rotation and no shearing).
    """
    data = img_in.get_data()
    old_shape = data.shape
    img_spacing = abs(img_in.affine[0, 0])

    # Copying is important; otherwise changes to new_affine would also modify the original affine
    new_affine = np.copy(img_in.affine)
    new_affine[0, 0] = new_spacing if img_in.affine[0, 0] > 0 else -new_spacing
    new_affine[1, 1] = new_spacing if img_in.affine[1, 1] > 0 else -new_spacing
    new_affine[2, 2] = new_spacing if img_in.affine[2, 2] > 0 else -new_spacing

    # new_shape = np.floor(np.array(img_in.get_data().shape) * (img_spacing / new_spacing))
    new_shape = np.floor(
        np.array(img_in.header.get_data_shape()) * (img_spacing / new_spacing))

    new_shape = new_shape[:3]  # drop last dim

    new_data = []
    for i in range(data.shape[3]):
        affine_map = AffineMap(np.eye(4), new_shape, new_affine, old_shape,
                               img_in.affine)
        # "nearest" generally gives slightly better results than "linear" interpolation
        res = affine_map.transform(data[:, :, :, i], interp="nearest")
        new_data.append(res)

    new_data = np.array(new_data).transpose(1, 2, 3, 0)
    img_new = nib.Nifti1Image(new_data, new_affine)

    return img_new
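
A minimal usage sketch for the function above (the file names are hypothetical; the function expects numpy, nibabel and AffineMap to be available in its module):

import numpy as np
import nibabel as nib
from dipy.align.imaffine import AffineMap

img = nib.load("dwi.nii.gz")                       # hypothetical 4D input
img_rs = change_spacing_4D(img, new_spacing=1.25)  # resample to 1.25 mm isotropic
nib.save(img_rs, "dwi_1.25mm.nii.gz")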
Example 2
def read_source_morph(fname):
    """Load the morph for source estimates from a file.

    Parameters
    ----------
    fname : str
        Full filename including path.

    Returns
    -------
    source_morph : instance of SourceMorph
        The loaded morph.
    """
    vals = read_hdf5(fname)
    if vals['pre_affine'] is not None:  # reconstruct
        from dipy.align.imaffine import AffineMap
        affine = vals['pre_affine']
        vals['pre_affine'] = AffineMap(None)
        vals['pre_affine'].__dict__ = affine
    if vals['sdr_morph'] is not None:
        from dipy.align.imwarp import DiffeomorphicMap
        morph = vals['sdr_morph']
        vals['sdr_morph'] = DiffeomorphicMap(None, [])
        vals['sdr_morph'].__dict__ = morph
    return SourceMorph(**vals)
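
A hedged usage sketch (the morph file name is hypothetical; SourceMorph objects are typically written by SourceMorph.save and applied to an existing source estimate):

morph = read_source_morph("sub-01-fsaverage-morph.h5")  # hypothetical file
stc_fsaverage = morph.apply(stc)  # assumes `stc` is a previously computed SourceEstimate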
Example 3
def read_source_morph(fname):
    """Load the morph for source estimates from a file.

    Parameters
    ----------
    fname : str
        Full filename including path.

    Returns
    -------
    source_morph : instance of SourceMorph
        The loaded morph.
    """
    vals = read_hdf5(fname)
    if vals['pre_affine'] is not None:  # reconstruct
        from dipy.align.imaffine import AffineMap
        affine = vals['pre_affine']
        vals['pre_affine'] = AffineMap(None)
        vals['pre_affine'].__dict__ = affine
    if vals['sdr_morph'] is not None:
        from dipy.align.imwarp import DiffeomorphicMap
        morph = vals['sdr_morph']
        vals['sdr_morph'] = DiffeomorphicMap(None, [])
        vals['sdr_morph'].__dict__ = morph
    return SourceMorph(**vals)
Example 4
def change_spacing_4D(img_in, new_spacing=1.25):
    from dipy.align.imaffine import AffineMap

    data = img_in.get_data()
    old_shape = data.shape
    img_spacing = abs(img_in.get_affine()[0, 0])

    # Copying is important; otherwise changes to new_affine would also modify the original affine
    new_affine = np.copy(img_in.get_affine())
    new_affine[0, 0] = new_spacing if img_in.get_affine()[0, 0] > 0 else -new_spacing
    new_affine[1, 1] = new_spacing if img_in.get_affine()[1, 1] > 0 else -new_spacing
    new_affine[2, 2] = new_spacing if img_in.get_affine()[2, 2] > 0 else -new_spacing

    new_shape = np.floor(np.array(img_in.get_data().shape) * (img_spacing / new_spacing))
    new_shape = new_shape[:3]  # drop last dim

    new_data = []
    for i in range(data.shape[3]):
        affine_map = AffineMap(np.eye(4),
                               new_shape, new_affine,
                               old_shape, img_in.get_affine()
                               )
        # "nearest" generally gives slightly better results than "linear" interpolation
        # res = affine_map.transform(data[:,:,:,i], interp="linear")
        res = affine_map.transform(data[:, :, :, i], interp="nearest")
        new_data.append(res)

    new_data = np.array(new_data).transpose(1, 2, 3, 0)
    img_new = nib.Nifti1Image(new_data, new_affine)

    return img_new
Example 5
def resample(moving, static, moving_grid2world, static_grid2world):
    """
    """
    identity = np.eye(4)
    affine_map = AffineMap(identity, static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    resampled = affine_map.transform(moving)
    return resampled
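
For reference, the AffineMap constructor takes the transform followed by the domain (static/target) grid shape and grid-to-world affine, then the codomain (moving/source) grid shape and affine; transform() samples the codomain image onto the domain grid. A minimal sketch with synthetic arrays:

import numpy as np
from dipy.align.imaffine import AffineMap

static = np.zeros((10, 10, 10))
moving = np.zeros((20, 20, 20))
amap = AffineMap(np.eye(4),
                 static.shape, np.eye(4),   # domain: static grid and its grid-to-world affine
                 moving.shape, np.eye(4))   # codomain: moving grid and its grid-to-world affine
resampled = amap.transform(moving)          # moving data sampled onto the static grid
print(resampled.shape)                      # (10, 10, 10)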
Example 6
def transform_anatomy(transfo, reference, moving, filename_to_save,
                      interp='linear', keep_dtype=False):
    """
    Apply a transformation to an image using DIPY's tools.

    Parameters
    ----------
    transfo: numpy.ndarray
        Transformation matrix to be applied
    reference: str
        Filename of the reference image (target)
    moving: str
        Filename of the moving image
    filename_to_save: str
        Filename of the output image
    interp : string, either 'linear' or 'nearest'
        the type of interpolation to be used, either 'linear'
        (for k-linear interpolation) or 'nearest' for nearest neighbor
    keep_dtype : bool
        If True, keeps the data type of the input moving image when saving
        the output image.
    """
    grid2world, dim, _, _ = get_reference_info(reference)
    static_data = nib.load(reference).get_fdata(dtype=np.float32)

    nib_file = nib.load(moving)
    curr_type = nib_file.get_data_dtype()
    if keep_dtype:
        moving_data = np.asanyarray(nib_file.dataobj).astype(curr_type)
    else:
        moving_data = nib_file.get_fdata(dtype=np.float32)
    moving_affine = nib_file.affine

    if moving_data.ndim == 3 and isinstance(moving_data[0, 0, 0],
                                            np.ScalarType):
        orig_type = moving_data.dtype
        affine_map = AffineMap(np.linalg.inv(transfo),
                               dim, grid2world,
                               moving_data.shape, moving_affine)
        resampled = affine_map.transform(moving_data.astype(np.float64),
                                         interpolation=interp)
        nib.save(nib.Nifti1Image(resampled.astype(orig_type), grid2world),
                 filename_to_save)
    elif len(moving_data[0, 0, 0]) > 1:
        if isinstance(moving_data[0, 0, 0], np.void):
            raise ValueError('Does not support TrackVis RGB')

        affine_map = AffineMap(np.linalg.inv(transfo),
                               dim[0:3], grid2world,
                               moving_data.shape[0:3], moving_affine)

        orig_type = moving_data.dtype
        resampled = transform_dwi(affine_map, static_data, moving_data,
                                  interpolation=interp)
        nib.save(nib.Nifti1Image(resampled.astype(orig_type), grid2world),
                 filename_to_save)
    else:
        raise ValueError('Does not support this dataset (shape, type, etc)')
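
A minimal usage sketch for the function above (file names are hypothetical; the transform is a 4x4 matrix such as one saved by a registration step):

import numpy as np

transfo = np.loadtxt("affine_reg.txt")  # hypothetical 4x4 transformation matrix
transform_anatomy(transfo, "t1.nii.gz", "labels.nii.gz", "labels_in_t1.nii.gz",
                  interp="nearest", keep_dtype=True)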
Example 7
def resample(moving, static, moving_grid2world, static_grid2world):
    """

    """
    identity = np.eye(4)
    affine_map = AffineMap(identity,
                           static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    resampled = affine_map.transform(moving)
    return resampled
Example 8
def exampleDipy():
    # example obtained from: http://nipy.org/dipy/examples_built/syn_registration_2d.html
    import numpy as np
    from dipy.align.metrics import CCMetric
    from dipy.align.imwarp import SymmetricDiffeomorphicRegistration
    from dipy.viz import regtools
    import ssl
    if hasattr(ssl, '_create_unverified_context'):
        ssl._create_default_https_context = ssl._create_unverified_context
    from dipy.data import fetch_stanford_hardi, read_stanford_hardi
    fetch_stanford_hardi()
    nib_stanford, gtab_stanford = read_stanford_hardi()
    stanford_b0 = np.squeeze(nib_stanford.get_data())[..., 0]

    from dipy.data.fetcher import fetch_syn_data, read_syn_data
    fetch_syn_data()
    nib_syn_t1, nib_syn_b0 = read_syn_data()
    syn_b0 = np.array(nib_syn_b0.get_data())

    from dipy.segment.mask import median_otsu

    stanford_b0_masked, stanford_b0_mask = median_otsu(stanford_b0, 4, 4)
    syn_b0_masked, syn_b0_mask = median_otsu(syn_b0, 4, 4)

    static = stanford_b0_masked
    static_affine = nib_stanford.affine
    moving = syn_b0_masked
    moving_affine = nib_syn_b0.affine

    pre_align = np.array(
        [[1.02783543e+00, -4.83019053e-02, -6.07735639e-02, -2.57654118e+00],
         [4.34051706e-03, 9.41918267e-01, -2.66525861e-01, 3.23579799e+01],
         [5.34288908e-02, 2.90262026e-01, 9.80820307e-01, -1.46216651e+01],
         [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]])

    from dipy.align.imaffine import AffineMap
    affine_map = AffineMap(pre_align,
                           static.shape, static_affine,
                           moving.shape, moving_affine)

    resampled = affine_map.transform(moving)

    metric = CCMetric(3)

    level_iters = [10, 10, 5]
    sdr = SymmetricDiffeomorphicRegistration(metric, level_iters)

    mapping = sdr.optimize(static, moving, static_affine, moving_affine,
                           pre_align)

    warped_moving = mapping.transform(moving)

    for sl in range(41 - 12, 41 + 13):
        regtools.overlay_slices(static, resampled, sl, 1, 'Static',
                                'Pre Moving',
                                'GIFexample1/' + str(sl) + 'T1pre.png')
        regtools.overlay_slices(static, warped_moving, sl, 1, 'Static',
                                'Post moving',
                                'GIFexample1/' + str(sl) + 'T1post.png')
Example 9
    def register_mask(mask_data,
                      mask_affine,
                      reference_img,
                      elastic_transform=None,
                      binary_img=True,
                      use_inverse=False):
        '''
        Transform a mask (binary image) with the given elastic_transform.

        :param mask_data:         data of the mask that should be transformed
        :param mask_affine:       affine of the mask that should be transformed
        :param reference_img:     a nibabel image to get shape and affine from for the affine transformation
        :param elastic_transform: elastic (diffeomorphic) transform applied after the affine resampling
        :param binary_img:        whether the input is a binary image (e.g. a mask) rather than a float image (e.g. T1)
        :param use_inverse:       if True, apply the inverse of the elastic transform

        :return: transformed mask (a binary image)
        '''

        logging.debug("mask original shape: {}".format(mask_data.shape))

        # Apply affine for mask image (to t1 space)
        affine_map_inv = AffineMap(
            np.eye(4),
            reference_img.get_data().shape,
            Utils.invert_x_and_y(reference_img.get_affine()), mask_data.shape,
            Utils.invert_x_and_y(mask_affine)
        )  # If I do not use invert_x_and_y for source and target, result is identical
        mask_data_reg = affine_map_inv.transform(mask_data)
        if binary_img:
            mask_data_reg = mask_data_reg > 0
        logging.debug("mask registered shape: {}".format(mask_data_reg.shape))

        if elastic_transform:

            # img = nib.Nifti1Image(mask_data_reg.astype(np.uint8), reference_img.get_affine())
            # nib.save(img, "ROI_registered_before.nii.gz")

            if use_inverse:
                mask_data_reg = elastic_transform.transform_inverse(
                    mask_data_reg)
            else:
                mask_data_reg = elastic_transform.transform(mask_data_reg)

            if binary_img:
                mask_data_reg = mask_data_reg > 0

            # img = nib.Nifti1Image(mask_data_reg.astype(np.uint8), reference_img.get_affine())
            # nib.save(img, "ROI_registered_after.nii.gz")

        else:
            logging.warning(
                "Elastic Transform deactivated; only using Affine Transform")

        if binary_img:
            mask_data_reg = mask_data_reg > 0
        return mask_data_reg
Example 10
    def run(self, static_image_file, moving_image_files, affine_matrix_file,
            out_dir='', out_file='transformed.nii.gz'):

        """
        Parameters
        ----------
        static_image_file : string
            Path of the static image file.

        moving_image_files : string
            Path of the moving image(s). It can be a single image or a
            folder containing multiple images.

        affine_matrix_file : string
            The text file containing the affine matrix for transformation.

        out_dir : string, optional
            Directory to save the transformed files (default '').

        out_file : string, optional
            Name of the transformed file (default 'transformed.nii.gz').
            It is recommended to use the flag --mix-names to
            prevent the output files from being overwritten.

        """
        io = self.get_io_iterator()

        for static_image_file, moving_image_file, affine_matrix_file, \
                out_file in io:

            # Loading the image data from the input files into object.
            static_image, static_grid2world = load_nifti(static_image_file)

            moving_image, moving_grid2world = load_nifti(moving_image_file)

            # Doing a sanity check for validating the dimensions of the input
            # images.
            ImageRegistrationFlow.check_dimensions(static_image, moving_image)

            # Loading the affine matrix.
            affine_matrix = np.loadtxt(affine_matrix_file)

            # Setting up the affine transformation object.
            img_transformation = AffineMap(
                affine=affine_matrix,
                domain_grid_shape=static_image.shape,
                domain_grid2world=static_grid2world,
                codomain_grid_shape=moving_image.shape,
                codomain_grid2world=moving_grid2world)

            # Transforming the image.
            transformed = img_transformation.transform(moving_image)

            save_nifti(out_file, transformed, affine=static_grid2world)
Example 11
    def estimate_rigid_projz(self, fixed, moving, tx_tr=None):
        # this returns a 3d rotation matrix
        assert len(moving.shape) == len(fixed.shape)
        if tx_tr is None:
            tmp = self.estimate_rigid2d(fixed.mean(axis=0),
                                        moving.mean(axis=0))
            tmp = tmp.affine
            tx_tr = np.eye(4)
            tx_tr[1:, 1:] = tmp
        else:
            if isinstance(tx_tr, AffineMap):
                tx_tr = tx_tr.affine
            if tx_tr.shape[0] == 3:
                tmp = np.eye(4)
                tmp[1:, 1:] = tx_tr
                tx_tr = tmp
            tmp = self.estimate_rigid2d(fixed.mean(axis=0),
                                        moving.mean(axis=0),
                                        tx_tr=tx_tr)
            tmp = tmp.affine
            tx_tr = np.eye(4)
            tx_tr[1:, 1:] = tmp
        return AffineMap(tx_tr,
                         domain_grid_shape=fixed.shape,
                         codomain_grid_shape=moving.shape)
Example 12
def apply_affine_tform(volume,
                       matrix,
                       sampling_grid_shape=None,
                       check_bounds=False,
                       contain_all=False,
                       domain_grid_shape=None,
                       codomain_grid_shape=None,
                       domain_grid2world=None,
                       codomain_grid2world=None,
                       sampling_grid2world=None):
    """
    given a homogeneous transformation matrix, create an affine matrix and use dipy to apply the transformation.
    """

    import numpy as np
    from dipy.align.imaffine import AffineMap

    if domain_grid_shape is None:
        domain_grid_shape = volume.shape
    if codomain_grid_shape is None:
        codomain_grid_shape = volume.shape

    if check_bounds:
        if contain_all:
            in_out_corners, out_shape, tilt_tf_ = compute_transform_bounds(
                domain_grid_shape, matrix, contain_all=True)
        else:
            in_out_corners, out_shape = compute_transform_bounds(
                domain_grid_shape, matrix, contain_all=False)
            tilt_tf_ = None
#        print out_shape
    affine_map = AffineMap(matrix,
                           domain_grid_shape=domain_grid_shape,
                           domain_grid2world=domain_grid2world,
                           codomain_grid_shape=codomain_grid_shape,
                           codomain_grid2world=codomain_grid2world)

    if check_bounds:
        out = affine_map.transform(volume,
                                   sampling_grid_shape=out_shape,
                                   sampling_grid2world=tilt_tf_)
    else:
        out = affine_map.transform(volume,
                                   sampling_grid_shape=sampling_grid_shape,
                                   sampling_grid2world=sampling_grid2world)

    return out
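
A small sketch of how the helper above might be called on a synthetic volume (the translation values are arbitrary):

import numpy as np

vol = np.random.rand(64, 64, 64).astype(np.float32)  # synthetic volume
tf = np.eye(4)
tf[:3, 3] = [5., 0., 0.]                              # translate 5 voxels along the first axis
shifted = apply_affine_tform(vol, tf)                 # same shape as the input by default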
Example 13
def resample(moving,
             static,
             moving_affine=None,
             static_affine=None,
             between_affine=None):
    """Resample an image (moving) from one space to another (static).

    Parameters
    ----------
    moving : array, nifti image or str
        Containing the data for the moving object, or full path to a nifti file
        with the moving data.

    moving_affine : 4x4 array, optional
        An affine transformation associated with the moving object. Required if
        data is provided as an array. If provided together with nifti/path,
        will over-ride the affine that is in the nifti.

    static : array, nifti image or str
        Containing the data for the static object, or full path to a nifti file
        with the static data.

    static_affine : 4x4 array, optional
        An affine transformation associated with the static object. Required if
        data is provided as an array. If provided together with nifti/path,
        will over-ride the affine that is in the nifti.

    between_affine : 4x4 array, optional
        An additional affine, if one is needed between the two spaces.
        Default: identity (no additional registration).

    Returns
    -------
    A Nifti1Image class instance with the data from the moving object
    resampled into the space of the static object.

    """

    static, static_affine, moving, moving_affine, between_affine = \
        _handle_pipeline_inputs(moving, static,
                                moving_affine=moving_affine,
                                static_affine=static_affine,
                                starting_affine=between_affine)
    affine_map = AffineMap(between_affine, static.shape, static_affine,
                           moving.shape, moving_affine)
    resampled = affine_map.transform(moving)
    return nib.Nifti1Image(resampled, static_affine)
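
A hedged usage sketch (file paths are hypothetical; per the docstring, arrays, nifti images, or paths are all accepted):

resampled_img = resample("moving_fa.nii.gz", "static_t1.nii.gz")
resampled_img.to_filename("moving_fa_in_t1_space.nii.gz")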
Example 14
File: image.py  Project: BIG-S2/PSC
def transform_anatomy(transfo, reference, moving, filename_to_save):
    dim, grid2world = get_reference_info(reference)

    moving_data, nib_file = get_data(moving, return_object=True)
    moving_affine = nib_file.affine

    if len(moving_data.shape) > 3:
        raise ValueError('Can only transform 3D images')

    affine_map = AffineMap(np.linalg.inv(transfo),
                           dim, grid2world,
                           moving_data.shape, moving_affine)

    resampled = affine_map.transform(moving_data)

    nib.save(nib.Nifti1Image(resampled, grid2world),
             filename_to_save)
Example 15
def transform_anatomy(transfo, reference, moving, filename_to_save):
    """
    Apply a transformation to an image using DIPY's tools.

    Parameters
    ----------
    transfo: numpy.ndarray
        Transformation matrix to be applied
    reference: str
        Filename of the reference image (target)
    moving: str
        Filename of the moving image
    filename_to_save: str
        Filename of the output image
    """
    dim, grid2world = get_reference_info(reference)
    static_data = get_data(reference)

    moving_data, nib_file = get_data(moving, return_object=True)
    moving_affine = nib_file.affine

    if moving_data.ndim == 3 and isinstance(moving_data[0, 0, 0],
                                            np.ScalarType):
        orig_type = moving_data.dtype
        affine_map = AffineMap(np.linalg.inv(transfo),
                               dim, grid2world,
                               moving_data.shape, moving_affine)
        resampled = affine_map.transform(moving_data.astype(np.float64))
        nib.save(nib.Nifti1Image(resampled.astype(orig_type), grid2world),
                 filename_to_save)
    elif len(moving_data[0, 0, 0]) > 1:
        if isinstance(moving_data[0, 0, 0], np.void):
            raise ValueError('Does not support TrackVis RGB')

        affine_map = AffineMap(np.linalg.inv(transfo),
                               dim[0:3], grid2world,
                               moving_data.shape[0:3], moving_affine)

        orig_type = moving_data.dtype
        resampled = transform_dwi(affine_map, static_data, moving_data)
        nib.save(nib.Nifti1Image(resampled.astype(orig_type), grid2world),
                 filename_to_save)
    else:
        raise ValueError('Does not support this dataset (shape, type, etc)')
Example 16
def resample_volume(moving, static):
    """ 
    Resample a nifti image into the space of another nifti image
    
    Parameters
    ----------
    moving : Nifti1Image
        The 'source' image.
    static : Nifti1Image
        The 'target' image.
        
    Returns
    -------
    resampled_img : Nifti1Image
       The source data in the target space, with the target affine
    """
    affine_map = AffineMap(np.eye(4),
                           static.shape[:3], static.affine, 
                           moving.shape, moving.affine)
    
    resampled = affine_map.transform(moving.get_data())
    return nib.Nifti1Image(resampled, static.get_affine())
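
A minimal usage sketch (file names are hypothetical):

import nibabel as nib

moving_img = nib.load("b0.nii.gz")   # hypothetical source image
static_img = nib.load("t1.nii.gz")   # hypothetical target image
out_img = resample_volume(moving_img, static_img)
nib.save(out_img, "b0_in_t1_space.nii.gz")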
Example 17
def resample(moving, static, moving_affine, static_affine):
    """Resample an image from one space to another.

    Parameters
    ----------
    moving : array
       The image to be resampled

    static : array
       The array to resample into.

    moving_affine : 4x4 array
       The grid-to-world affine of the moving image.
    static_affine : 4x4 array
       The grid-to-world affine of the static image.

    Returns
    -------
    resampled : the moving array resampled into the static array's space.
    """
    identity = np.eye(4)
    affine_map = AffineMap(identity, static.shape, static_affine, moving.shape,
                           moving_affine)
    resampled = affine_map.transform(moving)
    return resampled
Example 18
    def get_elastic_transform(subject_fa, atlas_fa, subject_path=".."):
        '''
        :param subject_fa:   the FA (nibabel img) of a subject (static image)
        :param atlas_fa:     the FA (nibabel img) of an atlas; the atlas will be warped onto the subject (moving image)
        :param subject_path: directory where the cached elastic transform is stored

        :return: elastic transformation map
        '''

        if isfile(subject_path + "/FAReg_elastic_transform.pklz"):
            logging.debug("Load existing elastic transform...")
            return Utils.load_pkl_compressed(subject_path +
                                             "/FAReg_elastic_transform.pklz")

        static_img = subject_fa
        static = static_img.get_data()
        moving_img = atlas_fa
        moving = moving_img.get_data()

        # Optional: affine transformation of the moving image to the static coordinate system; needed if the two images are on very different grids.
        affine_map = AffineMap(np.eye(4), static.shape,
                               static_img.get_affine(), moving.shape,
                               moving_img.get_affine())
        moving = affine_map.transform(moving)

        start_time = time.time()
        metric = CCMetric(3)
        level_iters = [10, 10, 5]  # better
        # level_iters = [2, 2, 2] #fast -> not much
        sdr = SymmetricDiffeomorphicRegistration(metric, level_iters)
        mapping = sdr.optimize(static, moving)
        # mapping = sdr.optimize(static, moving, Utils.invert_x_and_y(static_img.get_affine()), Utils.invert_x_and_y(moving_img.get_affine())) #not needed
        logging.debug("elastic transform took {0:.2f}s".format(time.time() -
                                                               start_time))

        logging.debug("write elastic transform...")
        Utils.save_pkl_compressed(
            subject_path + "/FAReg_elastic_transform.pklz", mapping)
        return mapping
Example 19
def resample(moving, static, moving_affine, static_affine):
    """Resample an image from one space to another.

    Parameters
    ----------
    moving : array
       The image to be resampled

    static : array
       The array to resample into.

    moving_affine : 4x4 array
       The grid-to-world affine of the moving image.
    static_affine : 4x4 array
       The grid-to-world affine of the static image.

    Returns
    -------
    resampled : the moving array resampled into the static array's space.
    """
    identity = np.eye(4)
    affine_map = AffineMap(identity,
                           static.shape, static_affine,
                           moving.shape, moving_affine)
    resampled = affine_map.transform(moving)
    return resampled
Example 20
def Sequential_Registration_b0(static,
                               static_grid2world,
                               moving,
                               moving_grid2world,
                               level_iters=[5],
                               sigmas=[3.0],
                               factors=[2]):
    pipeline = [center_of_mass, translation, rigid, affine]
    xformed_img, reg_affine = affine_registration(
        moving,
        static,
        moving_affine=moving_grid2world,
        static_affine=static_grid2world,
        nbins=16,
        metric='MI',
        pipeline=pipeline,
        level_iters=level_iters,
        sigmas=sigmas,
        factors=factors)
    affine_map = AffineMap(reg_affine, static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    return xformed_img, reg_affine, affine_map
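
A hedged usage sketch (file names are hypothetical; load_nifti comes from dipy.io.image):

from dipy.io.image import load_nifti

static, static_g2w = load_nifti("b0_template.nii.gz")   # hypothetical
moving, moving_g2w = load_nifti("b0_subject.nii.gz")    # hypothetical
warped, reg_affine, affine_map = Sequential_Registration_b0(
    static, static_g2w, moving, moving_g2w)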
Example 21
fetch_syn_data()
nib_syn_t1, nib_syn_b0 = read_syn_data()
moving = np.array(nib_syn_b0.get_data())
moving_grid2world = nib_syn_b0.affine

"""
We can see that the images are far from aligned by drawing one on top of
the other. The images don't even have the same number of voxels, so in order
to draw one on top of the other we need to resample the moving image on a grid
of the same dimensions as the static image, we can do this by "transforming"
the moving image using an identity transform
"""

identity = np.eye(4)
affine_map = AffineMap(identity,
                       static.shape, static_grid2world,
                       moving.shape, moving_grid2world)
resampled = affine_map.transform(moving)
regtools.overlay_slices(static, resampled, None, 0,
                        "Static", "Moving", "resampled_0.png")
regtools.overlay_slices(static, resampled, None, 1,
                        "Static", "Moving", "resampled_1.png")
regtools.overlay_slices(static, resampled, None, 2,
                        "Static", "Moving", "resampled_2.png")

"""
.. figure:: resampled_0.png
   :align: center
.. figure:: resampled_1.png
   :align: center
.. figure:: resampled_2.png
Example 22
def anatOverlay(dwi, t1):
    if t1['file'].split("acq-")[-1] != t1['file']:
        t1_acq = '_acq-' + t1['file'].split("acq-")[-1].split("_")[0]
    else:
        t1_acq = ''

    imgT1 = nib.load(t1['file'])
    img = nib.load(dwi['denoised'])

    b0_affine = img.affine
    b0 = dwi['b0']

    b0_mask = dwi['mask']

    b0 = b0 * b0_mask
    t1 = imgT1.get_data()
    t1_affine = imgT1.affine

    (t1_affine, perm, flip_sign) = helper.fixImageHeader(imgT1)

    t1 = np.transpose(t1, perm)

    if flip_sign[0] < 0:
        t1 = t1[::-1, :, :]

    if flip_sign[1] < 0:
        t1 = t1[:, ::-1, :]

    if flip_sign[2] < 0:
        t1 = t1[:, :, ::-1]

    affine_map = AffineMap(np.eye(4), t1.shape, t1_affine, b0.shape, b0_affine)

    resampled = affine_map.transform(np.array(b0))

    # Normalize the input images to [0,255]
    t1 = helper.normImg(t1)
    b0 = helper.normImg(resampled)

    overlay = np.zeros(shape=(t1.shape) + (3, ), dtype=np.uint8)
    b0_canny = np.zeros(shape=(t1.shape), dtype=bool)

    ind = helper.getImgThirds(t1)

    for i in ind[0]:
        b0_canny[:, :, i] = feature.canny(b0[:, :, i], sigma=1.5)

    for i in ind[1]:
        b0_canny[:, i, :] = feature.canny(np.squeeze(b0[:, i, :]), sigma=1.5)

    for i in ind[2]:
        #b0_canny[i-1,:,:] = feature.canny(np.squeeze(b0[i-1,:,:]), sigma=1.5)
        b0_canny[i, :, :] = feature.canny(np.squeeze(b0[i, :, :]), sigma=1.5)
        #b0_canny[i+1,:,:] = feature.canny(np.squeeze(b0[i+1,:,:]), sigma=1.5)

    overlay[..., 0] = t1
    overlay[..., 1] = t1
    overlay[..., 2] = t1
    overlay[..., 0] = b0_canny * 255

    voxSize = imgT1.header['pixdim'][1:4]

    helper.plotFig(overlay, 'alignment DWI -> T1', voxSize)  #[perm])
    plot_name = 't1' + t1_acq + '_overlay.png'
    plt.savefig(os.path.join(dwi['fig_dir'], plot_name), bbox_inches='tight')
    plt.close()
Example 23
    get_direction_and_spacings(moving_grid2world, dim)

from dipy.align.vector_fields import _gradient_3d
out_shape = static.shape
ftype = moving.dtype.type
out = np.empty(tuple(out_shape) + (dim, ), dtype=ftype)
inside = np.empty(tuple(out_shape), dtype=np.int32)
_gradient_3d(moving, moving_world2grid, moving_spacing, static_grid2world, out,
             inside)

mgrad = np.asarray(out)

from dipy.align.imaffine import AffineMap
dim = len(static.shape)
starting_affine = np.eye(dim + 1)
affine_map = AffineMap(starting_affine, static.shape, static_grid2world,
                       moving.shape, moving_grid2world)

static_values = static
moving_values = affine_map.transform(moving)

from dipy.align.transforms import AffineTransform3D
transform = AffineTransform3D()
params = transform.get_identity_parameters()

from dipy.align.parzenhist import ParzenJointHistogram
nbins = 32
histogram = ParzenJointHistogram(nbins)

static2prealigned = static_grid2world
histogram.update_gradient_dense(params, transform, static_values,
                                moving_values, static2prealigned, mgrad)
Example 24
                mode='constant',
                constant_values=0)

static_grid2world = static_affine
"""
Let's create a moving image by transforming the static image.

"""

affmat = np.eye(4)
affmat[0, -1] = 4
affmat[1, -1] = 12
theta = 0.1
c, s = np.cos(theta), np.sin(theta)
affmat[0:2, 0:2] = np.array([[c, -s], [s, c]])
affine_map = AffineMap(affmat, static.shape, static_grid2world, static.shape,
                       static_grid2world)
moving = affine_map.transform(static)
moving_affine = static_affine.copy()
moving_grid2world = static_grid2world.copy()

regtools.overlay_slices(static, moving, None, 2, "Static", "Moving",
                        "deregistered.png")
"""
.. figure:: deregistered.png
   :align: center

   Same images but misaligned.
"""
"""
Let's make some registration settings.
"""
Example 25
def test_affine_map():
    np.random.seed(2112927)
    dom_shape = np.array([64, 64, 64], dtype=np.int32)
    cod_shape = np.array([80, 80, 80], dtype=np.int32)
    nx = dom_shape[0]
    ny = dom_shape[1]
    nz = dom_shape[2]
    # Radius of the circle/sphere (testing image)
    radius = 16
    # Rotation axis (used for 3D transforms only)
    rot_axis = np.array([.5, 2.0, 1.5])
    # Arbitrary transform parameters
    t = 0.15
    rotations = [-1 * np.pi / 10.0, 0.0, np.pi / 10.0]
    scales = [0.9, 1.0, 1.1]
    for dim in [2, 3]:
        # Setup current dimension
        if dim == 2:
            # Create image of a circle
            img = vf.create_circle(cod_shape[0], cod_shape[1], radius)
            oracle_linear = vf.transform_2d_affine
            oracle_nn = vf.transform_2d_affine_nn
        else:
            # Create image of a sphere
            img = vf.create_sphere(cod_shape[0], cod_shape[1], cod_shape[2],
                                   radius)
            oracle_linear = vf.transform_3d_affine
            oracle_nn = vf.transform_3d_affine_nn
        img = np.array(img)
        # Translation is the only parameter differing for 2D and 3D
        translations = [t * dom_shape[:dim]]
        # Generate affine transforms
        gt_affines = create_affine_transforms(dim, translations, rotations,
                                              scales, rot_axis)
        # Include the None case
        gt_affines.append(None)

        # testing str/format/repr
        for affine_mat in gt_affines:
            aff_map = AffineMap(affine_mat)
            assert_equal(str(aff_map), aff_map.__str__())
            assert_equal(repr(aff_map), aff_map.__repr__())
            for spec in ['f', 'r', 't', '']:
                assert_equal(format(aff_map, spec), aff_map.__format__(spec))

        for affine in gt_affines:

            # make both domain point to the same physical region
            # It's ok to use the same transform, we just want to test
            # that this information is actually being considered
            domain_grid2world = affine
            codomain_grid2world = affine
            grid2grid_transform = affine

            # Evaluate the transform with vector_fields module (already tested)
            expected_linear = oracle_linear(img, dom_shape[:dim],
                                            grid2grid_transform)
            expected_nn = oracle_nn(img, dom_shape[:dim], grid2grid_transform)

            # Evaluate the transform with the implementation under test
            affine_map = imaffine.AffineMap(affine,
                                            dom_shape[:dim], domain_grid2world,
                                            cod_shape[:dim],
                                            codomain_grid2world)
            actual_linear = affine_map.transform(img, interp='linear')
            actual_nn = affine_map.transform(img, interp='nearest')
            assert_array_almost_equal(actual_linear, expected_linear)
            assert_array_almost_equal(actual_nn, expected_nn)

            # Test set_affine with valid matrix
            affine_map.set_affine(affine)
            if affine is None:
                assert(affine_map.affine is None)
                assert(affine_map.affine_inv is None)
            else:
                # compatibility with previous versions
                assert_array_equal(affine, affine_map.affine)
                # new getter
                new_copy_affine = affine_map.get_affine()
                # value must be the same
                assert_array_equal(affine, new_copy_affine)
                # but not its reference
                assert id(affine) != id(new_copy_affine)
                actual = affine_map.affine.dot(affine_map.affine_inv)
                assert_array_almost_equal(actual, np.eye(dim + 1))

            # Evaluate via the inverse transform

            # AffineMap will use the inverse of the input matrix when we call
            # `transform_inverse`. Since the inverse of the inverse of a matrix
            # is not exactly equal to the original matrix (numerical
            #  limitations) we need to invert the matrix twice to make sure
            # the oracle and the implementation under test apply the same
            # transform
            aff_inv = None if affine is None else npl.inv(affine)
            aff_inv_inv = None if aff_inv is None else npl.inv(aff_inv)
            expected_linear = oracle_linear(img, dom_shape[:dim],
                                            aff_inv_inv)
            expected_nn = oracle_nn(img, dom_shape[:dim], aff_inv_inv)

            affine_map = imaffine.AffineMap(aff_inv,
                                            cod_shape[:dim],
                                            codomain_grid2world,
                                            dom_shape[:dim], domain_grid2world)
            actual_linear = affine_map.transform_inverse(img, interp='linear')
            actual_nn = affine_map.transform_inverse(img, interp='nearest')
            assert_array_almost_equal(actual_linear, expected_linear)
            assert_array_almost_equal(actual_nn, expected_nn)

        # Verify AffineMap cannot be created with a non-square matrix
        non_square_shapes = [np.zeros((dim, dim + 1), dtype=np.float64),
                             np.zeros((dim + 1, dim), dtype=np.float64)]
        for nsq in non_square_shapes:
            assert_raises(AffineInversionError, AffineMap, nsq)

        # Verify incorrect augmentations are caught
        for affine_mat in gt_affines:
            aff_map = AffineMap(affine_mat)
            if affine_mat is None:
                continue
            bad_aug = aff_map.get_affine()
            # no zeros in the first n-1 columns on last row
            bad_aug[-1,:] = 1
            assert_raises(AffineInvalidValuesError, AffineMap, bad_aug)

            bad_aug = aff_map.get_affine()
            bad_aug[-1, -1] = 0  # lower right not 1
            assert_raises(AffineInvalidValuesError, AffineMap, bad_aug)

        # Verify AffineMap cannot be created with a non-invertible matrix
        invalid_nan = np.zeros((dim + 1, dim + 1), dtype=np.float64)
        invalid_nan[1, 1] = np.nan
        invalid_zeros = np.zeros((dim + 1, dim + 1), dtype=np.float64)
        assert_raises(
            imaffine.AffineInvalidValuesError,
            imaffine.AffineMap,
            invalid_nan)
        assert_raises(
            AffineInvalidValuesError,
            imaffine.AffineMap,
            invalid_zeros)

        # Test exception is raised when the affine transform matrix is not
        # valid
        invalid_shape = np.eye(dim)
        affmap_invalid_shape = imaffine.AffineMap(invalid_shape,
                                                  dom_shape[:dim], None,
                                                  cod_shape[:dim], None)
        assert_raises(ValueError, affmap_invalid_shape.transform, img)
        assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img)

        # Verify exception is raised when sampling info is not provided
        valid = np.eye(3)
        affmap_invalid_shape = imaffine.AffineMap(valid)
        assert_raises(ValueError, affmap_invalid_shape.transform, img)
        assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img)

        # Verify exception is raised when requesting an invalid interpolation
        assert_raises(ValueError, affine_map.transform, img, 'invalid')
        assert_raises(ValueError, affine_map.transform_inverse, img, 'invalid')

        # Verify exception is raised when attempting to warp an image of
        # invalid dimension
        for dim in [2, 3]:
            affine_map = imaffine.AffineMap(np.eye(dim),
                                            cod_shape[:dim], None,
                                            dom_shape[:dim], None)
            for sh in [(2,), (2, 2, 2, 2)]:
                img = np.zeros(sh)
                assert_raises(ValueError, affine_map.transform, img)
                assert_raises(ValueError, affine_map.transform_inverse, img)
            aff_sing = np.zeros((dim + 1, dim + 1))
            aff_nan = np.zeros((dim + 1, dim + 1))
            aff_nan[...] = np.nan
            aff_inf = np.zeros((dim + 1, dim + 1))
            aff_inf[...] = np.inf

            assert_raises(
                AffineInvalidValuesError,
                affine_map.set_affine,
                aff_sing)
            assert_raises(AffineInvalidValuesError, affine_map.set_affine, aff_nan)
            assert_raises(AffineInvalidValuesError, affine_map.set_affine, aff_inf)

    # Verify AffineMap can not be created with non-2D matrices : len(shape) != 2
    for dim_not_2 in range(10):
        if dim_not_2 != _number_dim_affine_matrix:
            mat_large_dim = np.random.random([2]*dim_not_2)
            assert_raises(AffineInversionError, AffineMap, mat_large_dim)
Example 26
def affine_registration(moving,
                        static,
                        moving_affine=None,
                        static_affine=None,
                        pipeline=None,
                        starting_affine=None,
                        metric='MI',
                        level_iters=None,
                        sigmas=None,
                        factors=None,
                        **metric_kwargs):
    """
    Find the affine transformation between two 3D images.

    Parameters
    ----------
    moving : array, nifti image or str
        Containing the data for the moving object, or full path to a nifti file
        with the moving data.

    moving_affine : 4x4 array, optional
        An affine transformation associated with the moving object. Required if
        data is provided as an array. If provided together with nifti/path,
        will over-ride the affine that is in the nifti.

    static : array, nifti image or str
        Containing the data for the static object, or full path to a nifti file
        with the static data.

    static_affine : 4x4 array, optional
        An affine transformation associated with the static object. Required if
        data is provided as an array. If provided together with nifti/path,
        will over-ride the affine that is in the nifti.

    pipeline : sequence, optional
        Sequence of transforms to use in the gradual fitting of the full
        affine. Default: (executed from left to right):
        `[center_of_mass, translation, rigid, affine]`

    starting_affine: 4x4 array, optional
        Initial guess for the transformation between the spaces.
        Default: identity.

    metric : str, optional.
        Currently only supports 'MI' for MutualInformationMetric.

    nbins : int, optional
        MutualInformationMetric key-word argument: the number of bins to be
        used for computing the intensity histograms. The default is 32.

    sampling_proportion : None or float in interval (0, 1], optional
        MutualInformationMetric key-word argument: There are two types of
        sampling: dense and sparse. Dense sampling uses all voxels for
        estimating the (joint and marginal) intensity histograms, while
        sparse sampling uses a subset of them. If `sampling_proportion` is
        None, then dense sampling is used. If `sampling_proportion` is a
        floating point value in (0,1] then sparse sampling is used,
        where `sampling_proportion` specifies the proportion of voxels to
        be used. The default is None (dense sampling).

    level_iters : sequence, optional
        AffineRegistration key-word argument: the number of iterations at each
        scale of the scale space. `level_iters[0]` corresponds to the coarsest
        scale, `level_iters[-1]` the finest, where n is the length of the
        sequence. By default, a 3-level scale space with iterations
        sequence equal to [10000, 1000, 100] will be used.

    sigmas : sequence of floats, optional
        AffineRegistration key-word argument: custom smoothing parameter to
        build the scale space (one parameter for each scale). By default,
        the sequence of sigmas will be [3, 1, 0].

    factors : sequence of floats, optional
        AffineRegistration key-word argument: custom scale factors to build the
        scale space (one factor for each scale). By default, the sequence of
        factors will be [4, 2, 1].

    Returns
    -------
    transformed : array
        The moving data resampled into the static space after computing the
        affine transformation.
    affine : 4x4 array
        The affine associated with the transformation.


    Notes
    -----
    Performs a gradual registration between the two inputs, using a pipeline
    that gradually approximates the final registration. If the final default
    step (`affine`) is omitted, the resulting affine may not have all 12
    degrees of freedom adjusted.
    """
    pipeline = pipeline or [center_of_mass, translation, rigid, affine]
    level_iters = level_iters or [10000, 1000, 100]
    sigmas = sigmas or [3, 1, 0.0]
    factors = factors or [4, 2, 1]

    static, static_affine, moving, moving_affine, starting_affine = \
        _handle_pipeline_inputs(moving, static,
                                moving_affine=moving_affine,
                                static_affine=static_affine,
                                starting_affine=starting_affine)

    # Define the Affine registration object we'll use with the chosen metric.
    # For now, there is only one metric (mutual information)
    use_metric = affine_metric_dict[metric](**metric_kwargs)

    affreg = AffineRegistration(metric=use_metric,
                                level_iters=level_iters,
                                sigmas=sigmas,
                                factors=factors)

    # Go through the selected transformation:
    for func in pipeline:
        starting_affine = func(moving,
                               static,
                               static_affine=static_affine,
                               moving_affine=moving_affine,
                               starting_affine=starting_affine,
                               reg=affreg)

    # After doing all that, resample once at the end:
    affine_map = AffineMap(starting_affine, static.shape, static_affine,
                           moving.shape, moving_affine)

    resampled = affine_map.transform(moving)

    return resampled, starting_affine
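
A hedged usage sketch (file paths are hypothetical; per the docstring, arrays, nifti images, or paths are all accepted, and nbins is forwarded to the metric):

warped, reg_affine = affine_registration(
    "fa_subject.nii.gz", "fa_template.nii.gz",
    nbins=32, metric="MI",
    level_iters=[10000, 1000, 100], sigmas=[3.0, 1.0, 0.0], factors=[4, 2, 1])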
Example 27
def img_reg(moving_img, target_img, reg='non-lin'):

    m_img = nib.load(moving_img)
    t_img = nib.load(target_img)

    m_img_data = m_img.get_data()
    t_img_data = t_img.get_data()

    m_img_affine = m_img.affine
    t_img_affine = t_img.affine

    identity = np.eye(4)
    affine_map = AffineMap(identity, t_img_data.shape, t_img_affine,
                           m_img_data.shape, m_img_affine)

    m_img_resampled = affine_map.transform(m_img_data)

    c_of_mass = transform_centers_of_mass(t_img_data, t_img_affine, m_img_data,
                                          m_img_affine)

    tf_m_img_c_mass = c_of_mass.transform(m_img_data)

    nbins = 32
    sampling_prop = None
    metric = MutualInformationMetric(nbins, sampling_prop)

    level_iters = [10, 10, 5]
    sigmas = [3.0, 1.0, 0.0]
    factors = [4, 2, 1]

    affreg = AffineRegistration(metric=metric,
                                level_iters=level_iters,
                                sigmas=sigmas,
                                factors=factors)

    transform = TranslationTransform3D()
    params0 = None
    starting_affine = c_of_mass.affine
    translation = affreg.optimize(t_img_data,
                                  m_img_data,
                                  transform,
                                  params0,
                                  t_img_affine,
                                  m_img_affine,
                                  starting_affine=starting_affine)

    tf_m_img_translat = translation.transform(m_img_data)

    transform = RigidTransform3D()
    params0 = None
    starting_affine = translation.affine
    rigid = affreg.optimize(t_img_data,
                            m_img_data,
                            transform,
                            params0,
                            t_img_affine,
                            m_img_affine,
                            starting_affine=starting_affine)

    tf_m_img_rigid = rigid.transform(m_img_data)

    transform = AffineTransform3D()
    affreg.level_iters = [10, 10, 10]
    affine = affreg.optimize(t_img_data,
                             m_img_data,
                             transform,
                             params0,
                             t_img_affine,
                             m_img_affine,
                             starting_affine=rigid.affine)

    if reg is None or reg == 'non-lin':

        metric = CCMetric(3)
        level_iters = [10, 10, 5]
        sdr = SymmetricDiffeomorphicRegistration(metric, level_iters)

        mapping = sdr.optimize(t_img_data, m_img_data, t_img_affine,
                               m_img_affine, affine.affine)

        tf_m_img = mapping.transform(m_img_data)

    elif reg == 'affine':

        tf_m_img = affine.transform(m_img_data)

    return tf_m_img
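
A minimal usage sketch for the function above (file names are hypothetical):

warped = img_reg("subject_fa.nii.gz", "template_fa.nii.gz", reg="non-lin")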
Example 28
def test_affine_map():
    np.random.seed(2112927)
    dom_shape = np.array([64, 64, 64], dtype=np.int32)
    cod_shape = np.array([80, 80, 80], dtype=np.int32)
    # Radius of the circle/sphere (testing image)
    radius = 16
    # Rotation axis (used for 3D transforms only)
    rot_axis = np.array([.5, 2.0, 1.5])
    # Arbitrary transform parameters
    t = 0.15
    rotations = [-1 * np.pi / 10.0, 0.0, np.pi / 10.0]
    scales = [0.9, 1.0, 1.1]
    for dim in [2, 3]:
        # Setup current dimension
        if dim == 2:
            # Create image of a circle
            img = vf.create_circle(cod_shape[0], cod_shape[1], radius)
            oracle_linear = vf.transform_2d_affine
            oracle_nn = vf.transform_2d_affine_nn
        else:
            # Create image of a sphere
            img = vf.create_sphere(cod_shape[0], cod_shape[1], cod_shape[2],
                                   radius)
            oracle_linear = vf.transform_3d_affine
            oracle_nn = vf.transform_3d_affine_nn
        img = np.array(img)
        # Translation is the only parameter differing for 2D and 3D
        translations = [t * dom_shape[:dim]]
        # Generate affine transforms
        gt_affines = create_affine_transforms(dim, translations, rotations,
                                              scales, rot_axis)
        # Include the None case
        gt_affines.append(None)

        # testing str/format/repr
        for affine_mat in gt_affines:
            aff_map = AffineMap(affine_mat)
            assert_equal(str(aff_map), aff_map.__str__())
            assert_equal(repr(aff_map), aff_map.__repr__())
            for spec in ['f', 'r', 't', '']:
                assert_equal(format(aff_map, spec), aff_map.__format__(spec))

        for affine in gt_affines:

            # make both domain point to the same physical region
            # It's ok to use the same transform, we just want to test
            # that this information is actually being considered
            domain_grid2world = affine
            codomain_grid2world = affine
            grid2grid_transform = affine

            # Evaluate the transform with vector_fields module (already tested)
            expected_linear = oracle_linear(img, dom_shape[:dim],
                                            grid2grid_transform)
            expected_nn = oracle_nn(img, dom_shape[:dim], grid2grid_transform)

            # Evaluate the transform with the implementation under test
            affine_map = imaffine.AffineMap(affine,
                                            dom_shape[:dim], domain_grid2world,
                                            cod_shape[:dim],
                                            codomain_grid2world)
            actual_linear = affine_map.transform(img, interp='linear')
            actual_nn = affine_map.transform(img, interp='nearest')
            assert_array_almost_equal(actual_linear, expected_linear)
            assert_array_almost_equal(actual_nn, expected_nn)

            # Test set_affine with valid matrix
            affine_map.set_affine(affine)
            if affine is None:
                assert(affine_map.affine is None)
                assert(affine_map.affine_inv is None)
            else:
                # compatibility with previous versions
                assert_array_equal(affine, affine_map.affine)
                # new getter
                new_copy_affine = affine_map.get_affine()
                # value must be the same
                assert_array_equal(affine, new_copy_affine)
                # but not its reference
                assert id(affine) != id(new_copy_affine)
                actual = affine_map.affine.dot(affine_map.affine_inv)
                assert_array_almost_equal(actual, np.eye(dim + 1))

            # Evaluate via the inverse transform

            # AffineMap will use the inverse of the input matrix when we call
            # `transform_inverse`. Since the inverse of the inverse of a matrix
            # is not exactly equal to the original matrix (numerical
            #  limitations) we need to invert the matrix twice to make sure
            # the oracle and the implementation under test apply the same
            # transform
            aff_inv = None if affine is None else npl.inv(affine)
            aff_inv_inv = None if aff_inv is None else npl.inv(aff_inv)
            expected_linear = oracle_linear(img, dom_shape[:dim],
                                            aff_inv_inv)
            expected_nn = oracle_nn(img, dom_shape[:dim], aff_inv_inv)

            affine_map = imaffine.AffineMap(aff_inv,
                                            cod_shape[:dim],
                                            codomain_grid2world,
                                            dom_shape[:dim], domain_grid2world)
            actual_linear = affine_map.transform_inverse(img, interp='linear')
            actual_nn = affine_map.transform_inverse(img, interp='nearest')
            assert_array_almost_equal(actual_linear, expected_linear)
            assert_array_almost_equal(actual_nn, expected_nn)

        # Verify AffineMap cannot be created with a non-square matrix
        non_square_shapes = [np.zeros((dim, dim + 1), dtype=np.float64),
                             np.zeros((dim + 1, dim), dtype=np.float64)]
        for nsq in non_square_shapes:
            assert_raises(AffineInversionError, AffineMap, nsq)

        # Verify incorrect augmentations are caught
        for affine_mat in gt_affines:
            aff_map = AffineMap(affine_mat)
            if affine_mat is None:
                continue
            bad_aug = aff_map.get_affine()
            # no zeros in the first n-1 columns on last row
            bad_aug[-1,:] = 1
            assert_raises(AffineInvalidValuesError, AffineMap, bad_aug)

            bad_aug = aff_map.get_affine()
            bad_aug[-1, -1] = 0  # lower right not 1
            assert_raises(AffineInvalidValuesError, AffineMap, bad_aug)

        # Verify AffineMap cannot be created with a non-invertible matrix
        invalid_nan = np.zeros((dim + 1, dim + 1), dtype=np.float64)
        invalid_nan[1, 1] = np.nan
        invalid_zeros = np.zeros((dim + 1, dim + 1), dtype=np.float64)
        assert_raises(
            imaffine.AffineInvalidValuesError,
            imaffine.AffineMap,
            invalid_nan)
        assert_raises(
            AffineInvalidValuesError,
            imaffine.AffineMap,
            invalid_zeros)

        # Test exception is raised when the affine transform matrix is not
        # valid
        invalid_shape = np.eye(dim)
        affmap_invalid_shape = imaffine.AffineMap(invalid_shape,
                                                  dom_shape[:dim], None,
                                                  cod_shape[:dim], None)
        assert_raises(ValueError, affmap_invalid_shape.transform, img)
        assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img)

        # Verify exception is raised when sampling info is not provided
        valid = np.eye(3)
        affmap_invalid_shape = imaffine.AffineMap(valid)
        assert_raises(ValueError, affmap_invalid_shape.transform, img)
        assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img)

        # Verify exception is raised when requesting an invalid interpolation
        assert_raises(ValueError, affine_map.transform, img, 'invalid')
        assert_raises(ValueError, affine_map.transform_inverse, img, 'invalid')

        # Verify exception is raised when attempting to warp an image of
        # invalid dimension
        for dim in [2, 3]:
            affine_map = imaffine.AffineMap(np.eye(dim),
                                            cod_shape[:dim], None,
                                            dom_shape[:dim], None)
            for sh in [(2,), (2, 2, 2, 2)]:
                img = np.zeros(sh)
                assert_raises(ValueError, affine_map.transform, img)
                assert_raises(ValueError, affine_map.transform_inverse, img)
            aff_sing = np.zeros((dim + 1, dim + 1))
            aff_nan = np.zeros((dim + 1, dim + 1))
            aff_nan[...] = np.nan
            aff_inf = np.zeros((dim + 1, dim + 1))
            aff_inf[...] = np.inf

            assert_raises(
                AffineInvalidValuesError,
                affine_map.set_affine,
                aff_sing)
            assert_raises(AffineInvalidValuesError, affine_map.set_affine, aff_nan)
            assert_raises(AffineInvalidValuesError, affine_map.set_affine, aff_inf)

    # Verify AffineMap cannot be created with non-2D matrices: len(shape) != 2
    for dim_not_2 in range(10):
        if dim_not_2 != _number_dim_affine_matrix:
            mat_large_dim = np.random.random([2]*dim_not_2)
            assert_raises(AffineInversionError, AffineMap, mat_large_dim)
Exemplo n.º 29
0
pre_align = np.array(
    [[1.02783543e+00, -4.83019053e-02, -6.07735639e-02, -2.57654118e+00],
     [4.34051706e-03, 9.41918267e-01, -2.66525861e-01, 3.23579799e+01],
     [5.34288908e-02, 2.90262026e-01, 9.80820307e-01, -1.46216651e+01],
     [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]])
"""
As we did in the 2D example, we would like to visualize (some slices of) the two
volumes by overlapping them over two channels of a color image. To do that we
need them to be sampled on the same grid, so let's first re-sample the moving
image on the static grid. We create an AffineMap to transform the moving image
towards the static image.
"""

from dipy.align.imaffine import AffineMap
affine_map = AffineMap(pre_align, static.shape, static_affine, moving.shape,
                       moving_affine)

resampled = affine_map.transform(moving)
"""
Plot the overlapped middle slices of the volumes.
"""

regtools.overlay_slices(static, resampled, None, 1, 'Static', 'Moving',
                        'input_3d.png')
"""
.. figure:: input_3d.png
   :align: center

   Static image in red on top of the pre-aligned moving image (in green).
"""
"""
Exemplo n.º 30
0
                    type=float,
                    default=0.5,
                    help="default: 0.5")
args = parser.parse_args()

img = get_img(args.nifti_file)
voxel_order = "".join(aff2axcodes(img.affine))
gtab = get_gtab(args.bvals, args.bvecs)
mask = get_img(args.mask_nifti)
data = img.get_fdata()

# resample mask if necessary
if mask.shape != data.shape:
    from dipy.align.imaffine import AffineMap
    identity = np.eye(4)
    affine_map = AffineMap(identity, img.shape[:3], img.affine, mask.shape[:3],
                           mask.affine)
    mask = affine_map.transform(mask.get_fdata())
    #mask = np.round(mask)
else:
    mask = mask.get_fdata()

# load or compute and save FA file
if (args.fa_numpy is not None) and os.path.isfile(args.fa_numpy):
    FA = np.load(args.fa_numpy, allow_pickle=True)
else:
    # Fit
    tenmodel = dti.TensorModel(gtab, fit_method='WLS')
    print('Fitting Tensor')
    tenfit = tenmodel.fit(data, mask)
    print('Computing anisotropy measures (FA,MD,RGB)')
    FA = tenfit.fa
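    # The excerpt stops here; the "load or compute and save FA file" comment
    # above implies a save step. A minimal, hedged sketch of that step,
    # assuming args.fa_numpy is the intended target path:
    if args.fa_numpy is not None:
        np.save(args.fa_numpy, FA)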
Exemplo n.º 31
0
    def run(self, static_image_files, moving_image_files, transform_map_file,
            transform_type='affine', out_dir='',
            out_file='transformed.nii.gz'):
        """
        Parameters
        ----------
        static_image_files : string
            Path of the static image file.

        moving_image_files : string
            Path of the moving image(s). It can be a single image or a
            folder containing multiple images.

        transform_map_file : string
            For the affine case, it should be a text (*.txt) file containing
            the affine matrix. For the diffeomorphic case, it should be a
            nifti file containing the mapping displacement field in each
            voxel with this shape (x, y, z, 3, 2).

        transform_type : string, optional
            Select the transformation type to apply between 'affine' or
            'diffeomorphic'. (default affine)

        out_dir : string, optional
            Directory to save the transformed files (default '').

        out_file : string, optional
            Name of the transformed file (default 'transformed.nii.gz').
            It is recommended to use the flag --mix-names to prevent the
            output files from being overwritten.

        """
        if transform_type.lower() not in ['affine', 'diffeomorphic']:
            raise ValueError("Invalid transformation type: Please"
                             " provide a valid transform like 'affine'"
                             " or 'diffeomorphic'")

        io = self.get_io_iterator()

        for static_image_file, moving_image_file, transform_file, \
                out_file in io:

            # Loading the image data from the input files into object.
            static_image, static_grid2world = load_nifti(static_image_file)
            moving_image, moving_grid2world = load_nifti(moving_image_file)

            # Doing a sanity check for validating the dimensions of the input
            # images.
            check_dimensions(static_image, moving_image)

            if transform_type.lower() == 'affine':
                # Loading the affine matrix.
                affine_matrix = np.loadtxt(transform_file)

                # Setting up the affine transformation object.
                mapping = AffineMap(
                    affine=affine_matrix,
                    domain_grid_shape=static_image.shape,
                    domain_grid2world=static_grid2world,
                    codomain_grid_shape=moving_image.shape,
                    codomain_grid2world=moving_grid2world)

            elif transform_type.lower() == 'diffeomorphic':
                # Loading the diffeomorphic map.
                disp = nib.load(transform_file)

                mapping = DiffeomorphicMap(
                    3, disp.shape[:3],
                    disp_grid2world=np.linalg.inv(disp.affine),
                    domain_shape=static_image.shape,
                    domain_grid2world=static_grid2world,
                    codomain_shape=moving_image.shape,
                    codomain_grid2world=moving_grid2world)

                disp_data = disp.get_data()
                mapping.forward = disp_data[..., 0]
                mapping.backward = disp_data[..., 1]
                mapping.is_inverse = True

            # Transforming the image.
            transformed = mapping.transform(moving_image)

            save_nifti(out_file, transformed, affine=static_grid2world)
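            # Sketch (not part of the original workflow): the affine branch
            # above reads transform_map_file with np.loadtxt, so that file is
            # simply a plain-text 4x4 matrix, e.g. one written elsewhere with
            #     np.savetxt("affine_map.txt", affine_matrix)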
Exemplo n.º 32
0
    Diff2 = nib.load(directory + "/" + dIndex[2])
    Ktrans = nib.load(dIndex[3])
    #hdr = t2.header
    #print(hdr)
    static = t2.get_data()
    static_grid2world = t2.affine
    moving = Diff1.get_data()
    moving_grid2world = Diff1.affine
    moving2 = Diff2.get_data()
    moving2_grid2world = Diff2.affine
    moving3 = Ktrans.get_data()
    moving3_grid2world = Ktrans.affine
    identity = np.eye(4)

    affine_map = AffineMap(identity,
                           static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    resampled1 = affine_map.transform(moving)
    affine_map2 = AffineMap(identity,
                            static.shape, static_grid2world,
                            moving2.shape, moving2_grid2world)
    resampled2 = affine_map2.transform(moving2)
    affine_mapk = AffineMap(identity,
                            static.shape, static_grid2world,
                            moving3.shape, moving3_grid2world)
    resampledk = affine_mapk.transform(moving3)

    out = np.stack([static.transpose(2, 0, 1),
                    resampled1.transpose(2, 0, 1),
                    resampled2.transpose(2, 0, 1),
                    resampledk.transpose(2, 0, 1)], axis=-1)
    #print(out.shape)
    patient = smoothslices(out, 19)
    stack += [patient]
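    # A hedged refactoring sketch (not in the original code): the three
    # AffineMap blocks above differ only in the moving volume, so a helper
    # like the following could remove the repetition, e.g.
    # resampled1 = resample_to_static(moving, moving_grid2world).
    def resample_to_static(moving_data, moving_affine):
        amap = AffineMap(identity, static.shape, static_grid2world,
                         moving_data.shape, moving_affine)
        return amap.transform(moving_data)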
Exemplo n.º 33
0
def registration(diff, affine_diff, anat, affine_anat):
    # Affine transformation between diffusion and anatomical data
    static = np.squeeze(diff)[..., 0]
    static_grid2world = affine_diff

    moving = anat
    moving_grid2world = affine_anat

    identity = np.eye(4)
    affine_map = AffineMap(identity, static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    resampled = affine_map.transform(moving)
    regtools.overlay_slices(static, resampled, None, 0, "Static", "Moving",
                            "resampled_0.png")
    regtools.overlay_slices(static, resampled, None, 1, "Static", "Moving",
                            "resampled_1.png")
    regtools.overlay_slices(static, resampled, None, 2, "Static", "Moving",
                            "resampled_2.png")

    c_of_mass = transform_centers_of_mass(static, static_grid2world, moving,
                                          moving_grid2world)

    transformed = c_of_mass.transform(moving)
    regtools.overlay_slices(static, transformed, None, 0, "Static",
                            "Transformed", "transformed_com_0.png")
    regtools.overlay_slices(static, transformed, None, 1, "Static",
                            "Transformed", "transformed_com_1.png")
    regtools.overlay_slices(static, transformed, None, 2, "Static",
                            "Transformed", "transformed_com_2.png")

    nbins = 32
    sampling_prop = None
    metric = MutualInformationMetric(nbins, sampling_prop)
    level_iters = [10000, 1000, 100]
    factors = [4, 2, 1]
    sigmas = [3.0, 1.0, 0.0]
    affreg = AffineRegistration(metric=metric,
                                level_iters=level_iters,
                                sigmas=sigmas,
                                factors=factors)

    transform = TranslationTransform3D()
    params0 = None
    starting_affine = c_of_mass.affine
    translation = affreg.optimize(static,
                                  moving,
                                  transform,
                                  params0,
                                  static_grid2world,
                                  moving_grid2world,
                                  starting_affine=starting_affine)

    transformed = translation.transform(moving)
    regtools.overlay_slices(static, transformed, None, 0, "Static",
                            "Transformed", "transformed_trans_0.png")
    regtools.overlay_slices(static, transformed, None, 1, "Static",
                            "Transformed", "transformed_trans_1.png")
    regtools.overlay_slices(static, transformed, None, 2, "Static",
                            "Transformed", "transformed_trans_2.png")

    transform = RigidTransform3D()
    params0 = None
    starting_affine = translation.affine
    rigid = affreg.optimize(static,
                            moving,
                            transform,
                            params0,
                            static_grid2world,
                            moving_grid2world,
                            starting_affine=starting_affine)

    transformed = rigid.transform(moving)
    regtools.overlay_slices(static, transformed, None, 0, "Static",
                            "Transformed", "transformed_rigid_0.png")
    regtools.overlay_slices(static, transformed, None, 1, "Static",
                            "Transformed", "transformed_rigid_1.png")
    regtools.overlay_slices(static, transformed, None, 2, "Static",
                            "Transformed", "transformed_rigid_2.png")

    transform = AffineTransform3D()
    params0 = None
    starting_affine = rigid.affine
    affine = affreg.optimize(static,
                             moving,
                             transform,
                             params0,
                             static_grid2world,
                             moving_grid2world,
                             starting_affine=starting_affine)
    transformed = affine.transform(moving)
    regtools.overlay_slices(static, transformed, None, 0, "Static",
                            "Transformed", "transformed_affine_0.png")
    regtools.overlay_slices(static, transformed, None, 1, "Static",
                            "Transformed", "transformed_affine_1.png")
    regtools.overlay_slices(static, transformed, None, 2, "Static",
                            "Transformed", "transformed_affine_2.png")

    inverse_map = AffineMap(starting_affine, static.shape, static_grid2world,
                            moving.shape, moving_grid2world)
    resampled_inverse = inverse_map.transform_inverse(transformed,
                                                      resample_only=True)
    nib.save(nib.Nifti1Image(resampled_inverse, affine_diff),
             'brain.coreg.nii.gz')
    return transformed
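# A hedged usage sketch for registration() above; the file paths are
# illustrative placeholders, not from the original script.
if __name__ == '__main__':
    import nibabel as nib
    diff_img = nib.load('dwi.nii.gz')    # placeholder diffusion volume
    anat_img = nib.load('t1.nii.gz')     # placeholder anatomical volume
    coregistered = registration(diff_img.get_data(), diff_img.affine,
                                anat_img.get_data(), anat_img.affine)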
Exemplo n.º 34
0
def affine_registration(moving,
                        static,
                        moving_affine=None,
                        static_affine=None,
                        pipeline=None,
                        starting_affine=None,
                        metric='MI',
                        level_iters=None,
                        sigmas=None,
                        factors=None,
                        ret_metric=False,
                        **metric_kwargs):
    """
    Find the affine transformation between two 3D images. Alternatively, find
    the combination of several linear transformations.

    Parameters
    ----------
    moving : array, nifti image or str
        Containing the data for the moving object, or full path to a nifti file
        with the moving data.

    static : array, nifti image or str
        Containing the data for the static object, or full path to a nifti file
        with the static data.

    moving_affine : 4x4 array, optional
        An affine transformation associated with the moving object. Required if
        data is provided as an array. If provided together with nifti/path,
        will override the affine that is in the nifti.

    static_affine : 4x4 array, optional
        An affine transformation associated with the static object. Required if
        data is provided as an array. If provided together with nifti/path,
        will override the affine that is in the nifti.

    pipeline : list of str, optional
        Sequence of transforms to use in the gradual fitting. Default: gradual
        fit of the full affine (executed from left to right):
        ``["center_of_mass", "translation", "rigid", "affine"]``
        Alternatively, any other combination of the following registration
        methods might be used: center_of_mass, translation, rigid,
        rigid_isoscaling, rigid_scaling and affine.

    starting_affine: 4x4 array, optional
        Initial guess for the transformation between the spaces.
        Default: identity.

    metric : str, optional.
        Currently only supports 'MI' for MutualInformationMetric.

    level_iters : sequence, optional
        AffineRegistration key-word argument: the number of iterations at each
        scale of the scale space. `level_iters[0]` corresponds to the coarsest
        scale and `level_iters[-1]` to the finest. By default, a 3-level scale
        space with an iterations sequence of [10000, 1000, 100] will be used.

    sigmas : sequence of floats, optional
        AffineRegistration key-word argument: custom smoothing parameter to
        build the scale space (one parameter for each scale). By default,
        the sequence of sigmas will be [3, 1, 0].

    factors : sequence of floats, optional
        AffineRegistration key-word argument: custom scale factors to build the
        scale space (one factor for each scale). By default, the sequence of
        factors will be [4, 2, 1].

    ret_metric : boolean, optional
        Set it to True to return the value of the optimized coefficients and
        the optimization quality metric.

    nbins : int, optional
        MutualInformationMetric key-word argument: the number of bins to be
        used for computing the intensity histograms. The default is 32.

    sampling_proportion : None or float in interval (0, 1], optional
        MutualInformationMetric key-word argument: There are two types of
        sampling: dense and sparse. Dense sampling uses all voxels for
        estimating the (joint and marginal) intensity histograms, while
        sparse sampling uses a subset of them. If `sampling_proportion` is
        None, then dense sampling is used. If `sampling_proportion` is a
        floating point value in (0,1] then sparse sampling is used,
        where `sampling_proportion` specifies the proportion of voxels to
        be used. The default is None (dense sampling).

    Returns
    -------
    transformed : array
        The moving data resampled to the static space after computing the
        affine transformation.
    affine : array (4, 4)
        The affine associated with the transformation.
    xopt : array
        The value of the optimized coefficients. Only returned if
        `ret_metric` is True.
    fopt : float
        The value of the optimization quality metric. Only returned if
        `ret_metric` is True.

    Notes
    -----
    Performs a gradual registration between the two inputs, using a pipeline
    that gradually approximates the final registration. If the final default
    step (`affine`) is omitted, the resulting affine may not have all 12
    degrees of freedom adjusted.
    """
    pipeline = pipeline or ["center_of_mass", "translation", "rigid", "affine"]
    level_iters = level_iters or [10000, 1000, 100]
    sigmas = sigmas or [3, 1, 0.0]
    factors = factors or [4, 2, 1]

    static, static_affine, moving, moving_affine, starting_affine = \
        _handle_pipeline_inputs(moving, static,
                                moving_affine=moving_affine,
                                static_affine=static_affine,
                                starting_affine=starting_affine)

    # Define the Affine registration object we'll use with the chosen metric.
    # For now, there is only one metric (mutual information)
    use_metric = affine_metric_dict[metric](**metric_kwargs)

    affreg = AffineRegistration(metric=use_metric,
                                level_iters=level_iters,
                                sigmas=sigmas,
                                factors=factors)

    # Convert pipeline to sanitized list of str
    pipeline = list(pipeline)
    for fi, func in enumerate(pipeline):
        if callable(func):
            for key, val in _METHOD_DICT.items():
                if func is val[0]:  # if they passed the callable equiv.
                    pipeline[fi] = func = key
                    break
        if not isinstance(func, str) or func not in _METHOD_DICT:
            raise ValueError(f'pipeline[{fi}] must be one of '
                             f'{list(_METHOD_DICT)}, got {repr(func)}')

    if pipeline == ["center_of_mass"] and ret_metric:
        raise ValueError("center of mass registration cannot return any "
                         "quality metric.")

    # Go through the selected transformation:
    for func in pipeline:
        if func == "center_of_mass":
            transform = transform_centers_of_mass(static, static_affine,
                                                  moving, moving_affine)
            starting_affine = transform.affine
        else:
            transform = _METHOD_DICT[func][1]()
            xform, xopt, fopt \
                = affreg.optimize(static, moving, transform, None,
                                  static_affine, moving_affine,
                                  starting_affine=starting_affine,
                                  ret_metric=True)
            starting_affine = xform.affine

    # After doing all that, resample once at the end:
    affine_map = AffineMap(starting_affine, static.shape, static_affine,
                           moving.shape, moving_affine)

    resampled = affine_map.transform(moving)

    # Return the optimization metric only if requested
    if ret_metric:
        return resampled, starting_affine, xopt, fopt
    return resampled, starting_affine
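# A minimal usage sketch for affine_registration() above; the file names are
# illustrative placeholders, and load_nifti comes from dipy.io.image.
if __name__ == '__main__':
    from dipy.io.image import load_nifti
    moving_data, moving_affine = load_nifti('b0.nii.gz')   # placeholder
    static_data, static_affine = load_nifti('t1.nii.gz')   # placeholder
    warped, reg_affine = affine_registration(
        moving_data, static_data,
        moving_affine=moving_affine, static_affine=static_affine,
        pipeline=["center_of_mass", "translation", "rigid", "affine"])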
Exemplo n.º 35
0
def main():

    args = parse_args()
    logging.basicConfig(level=logging.getLevelName(args.debug_level.upper()))

    pybids_cache_path = os.path.join(args.bids_path, PYBIDS_CACHE_PATH)

    layout = bids.BIDSLayout(
        args.bids_path,
        database_path=pybids_cache_path,
        reset_database=args.force_reindex,
        index_metadata=False,
        validate=False,
    )

    if args.datalad:
        annex_repo = AnnexRepo(args.bids_path)

    subject_list = (
        args.participant_label if args.participant_label else bids.layout.Query.ANY
    )
    session_list = args.session_label if args.session_label else bids.layout.Query.ANY
    filters = dict(
        subject=subject_list,
        session=session_list,
        **args.ref_bids_filters,
        extension=['nii','nii.gz'])
    deface_ref_images = layout.get(**filters)

    if not len(deface_ref_images):
        logging.info(f"no reference image found with condition {filters}")
        return

    new_files, modified_files = [], []

    script_dir = os.path.dirname(__file__)

    mni_path = os.path.abspath(os.path.join(script_dir, MNI_PATH))
    mni_mask_path = os.path.abspath(os.path.join(script_dir, MNI_MASK_PATH))
    # if the MNI template image is not available locally
    if not os.path.exists(os.path.realpath(mni_path)):
        datalad.api.get(mni_path, dataset=datalad.api.Dataset(script_dir + "/../../"))
    tmpl_image = nb.load(mni_path)
    tmpl_image_mask = nb.load(mni_mask_path)
    tmpl_defacemask = generate_deface_ear_mask(tmpl_image)
    brain_xtractor = Extractor()

    for ref_image in deface_ref_images:
        subject = ref_image.entities["subject"]
        session = ref_image.entities["session"]

        datalad.api.get(ref_image.path)
        ref_image_nb = ref_image.get_image()

        matrix_path = ref_image.path.replace(
            "_%s.%s" % (ref_image.entities["suffix"], ref_image.entities["extension"]),
            "_mod-%s_defacemaskreg.mat" % ref_image.entities["suffix"],
        )

        if os.path.exists(matrix_path):
            logging.info("reusing existing registration matrix")
            ref2tpl_affine = AffineMap(np.loadtxt(matrix_path))
        else:
            logging.info(f"running registration of reference serie: {ref_image.path}")
            brain_mask = (brain_xtractor.run(ref_image_nb.get_fdata()) > 0.99).astype(
                np.uint8
            )
            brain_mask[:] = scipy.ndimage.morphology.binary_dilation(
                brain_mask, iterations=4
            )
            brain_mask_nb = nb.Nifti1Image(brain_mask, ref_image_nb.affine)
            ref2tpl_affine = registration(
                tmpl_image, ref_image_nb, tmpl_image_mask, brain_mask_nb
            )
            np.savetxt(matrix_path, ref2tpl_affine.affine)
            new_files.append(matrix_path)

        if args.debug_images:
            output_debug_images(tmpl_image, ref_image, ref2tpl_affine)

        series_to_deface = []
        for filters in args.other_bids_filters:
            series_to_deface.extend(
                layout.get(
                    extension=["nii", "nii.gz"],
                    subject=subject,
                    session=session,
                    **filters,
                )
            )

        # unlock before making any change to avoid unwanted save
        if args.datalad:
            annex_repo.unlock([serie.path for serie in series_to_deface])

        for serie in series_to_deface:
            if args.datalad:
                if (
                    next(annex_repo.get_metadata(serie.path))[1].get(
                        "distribution-restrictions"
                    )
                    is None
                ):
                    logging.info(
                        f"skip {serie.path}: no distribution-restrictions metadata set."
                    )
                    continue
            logging.info(f"defacing {serie.path}")

            datalad.api.get(serie.path)
            serie_nb = serie.get_image()
            warped_mask = warp_mask(tmpl_defacemask, serie_nb, ref2tpl_affine)
            if args.save_all_masks or serie == ref_image:
                warped_mask_path = serie.path.replace(
                    "_%s" % serie.entities["suffix"],
                    "_mod-%s_defacemask" % serie.entities["suffix"],
                )
                if os.path.exists(warped_mask_path):
                    logging.warning(
                        f"{warped_mask_path} already exists : will not overwrite, clean before rerun"
                    )
                else:
                    warped_mask.to_filename(warped_mask_path)
                    new_files.append(warped_mask_path)

            masked_serie = nb.Nifti1Image(
                np.asanyarray(serie_nb.dataobj) * np.asanyarray(warped_mask.dataobj),
                serie_nb.affine,
                serie_nb.header,
            )
            masked_serie.to_filename(serie.path)
            modified_files.append(serie.path)

    if args.datalad and len(modified_files):
        logging.info("saving files and metadata changes in datalad")
        annex_repo.set_metadata(
            modified_files, remove={"distribution-restrictions": "sensitive"}
        )
        datalad.api.save(
            modified_files + new_files,
            message="deface %d series/images and update distribution-restrictions"
            % len(modified_files),
        )
Exemplo n.º 36
0
brainweb_strip = brainweb_strip.transpose([0, 2, 1])[::-1, :, :]
brainweb_mask = brainweb_strip > 0

brainweb_name = info.get_brainweb("t1", "raw")
brainweb_nib = nib.load(brainweb_name)
brainweb = brainweb_nib.get_data().squeeze()
brainweb_affine = brainweb_nib.get_affine()
brainweb = brainweb.transpose([0, 2, 1])[::-1, :, :]
rt.plot_slices(brainweb)
brainweb_affine = ibsr1_affine.copy()
brainweb_affine[brainweb_affine != 0] = 1
brainweb_affine[0, 0] = -1


# Reslice Brainweb on IBSR1
ibsr_to_bw = AffineMap(None, ibsr1.shape, ibsr1_affine,
                       brainweb.shape, brainweb_affine)
bw_on_ibsr1 = ibsr_to_bw.transform(brainweb)
rt.overlay_slices(ibsr1, bw_on_ibsr1)  # misaligned

c_of_mass = transform_centers_of_mass(ibsr1, ibsr1_affine, brainweb, brainweb_affine)
bw_on_ibsr1 = c_of_mass.transform(brainweb)
rt.overlay_slices(ibsr1, bw_on_ibsr1)  # roughly aligned

# Start affine alignment
aff_name = "ibsr1_to_brainweb.p"
if os.path.isfile(aff_name):
    with open(aff_name, "rb") as f:
        ibsr_bw_affmap = pickle.load(f)
else:
    ibsr_bw_affmap = dipy_align(ibsr1, ibsr1_affine, brainweb, brainweb_affine)
    with open(aff_name, "wb") as f:
        pickle.dump(ibsr_bw_affmap, f)
bw_on_ibsr1 = ibsr_bw_affmap.transform(brainweb)
Exemplo n.º 37
0
def resample(moving, static, moving_grid2world, static_grid2world):
    """Resample an image from one space to another."""
    identity = np.eye(4)
    affine_map = AffineMap(identity, static.shape, static_grid2world,
                           moving.shape, moving_grid2world)
    resampled = affine_map.transform(moving)
    return resampled
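# Usage sketch (the variable names are placeholders, not from the original
# snippet):
#     moving_in_static = resample(moving_data, static_data,
#                                 moving_grid2world, static_grid2world)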
Exemplo n.º 38
0
    def run(self,
            static_image_files,
            moving_image_files,
            transform_map_file,
            transform_type='affine',
            out_dir='',
            out_file='transformed.nii.gz'):
        """
        Parameters
        ----------
        static_image_files : string
            Path of the static image file.

        moving_image_files : string
            Path of the moving image(s). It can be a single image or a
            folder containing multiple images.

        transform_map_file : string
            For the affine case, it should be a text (*.txt) file containing
            the affine matrix. For the diffeomorphic case, it should be a
            nifti file containing the mapping displacement field in each
            voxel with this shape (x, y, z, 3, 2).

        transform_type : string, optional
            Select the transformation type to apply between 'affine' or
            'diffeomorphic'. (default affine)

        out_dir : string, optional
            Directory to save the transformed files (default '').

        out_file : string, optional
            Name of the transformed file (default 'transformed.nii.gz').
            It is recommended to use the flag --mix-names to prevent the
            output files from being overwritten.

        """
        if transform_type.lower() not in ['affine', 'diffeomorphic']:
            raise ValueError("Invalid transformation type: Please"
                             " provide a valid transform like 'affine'"
                             " or 'diffeomorphic'")

        io = self.get_io_iterator()

        for static_image_file, moving_image_file, transform_file, \
                out_file in io:

            # Loading the image data from the input files into object.
            static_image, static_grid2world = load_nifti(static_image_file)
            moving_image, moving_grid2world = load_nifti(moving_image_file)

            # Doing a sanity check for validating the dimensions of the input
            # images.
            check_dimensions(static_image, moving_image)

            if transform_type.lower() == 'affine':
                # Loading the affine matrix.
                affine_matrix = np.loadtxt(transform_file)

                # Setting up the affine transformation object.
                mapping = AffineMap(affine=affine_matrix,
                                    domain_grid_shape=static_image.shape,
                                    domain_grid2world=static_grid2world,
                                    codomain_grid_shape=moving_image.shape,
                                    codomain_grid2world=moving_grid2world)

            elif transform_type.lower() == 'diffeomorphic':
                # Loading the diffeomorphic map.
                disp_data, disp_affine = load_nifti(transform_file)

                mapping = DiffeomorphicMap(
                    3,
                    disp_data.shape[:3],
                    disp_grid2world=np.linalg.inv(disp_affine),
                    domain_shape=static_image.shape,
                    domain_grid2world=static_grid2world,
                    codomain_shape=moving_image.shape,
                    codomain_grid2world=moving_grid2world)

                mapping.forward = disp_data[..., 0]
                mapping.backward = disp_data[..., 1]
                mapping.is_inverse = True

            # Transforming the image.
            transformed = mapping.transform(moving_image)

            save_nifti(out_file, transformed, affine=static_grid2world)
Exemplo n.º 39
0
"""

files, folder = fetch_syn_data()
moving_data, moving_affine = load_nifti(pjoin(folder, 'b0.nii.gz'))
moving = moving_data
moving_grid2world = moving_affine
"""
We can see that the images are far from aligned by drawing one on top of the
other. The images don't even have the same number of voxels, so in order to
draw one on top of the other we need to resample the moving image on a grid of
the same dimensions as the static image. We can do this by "transforming" the
moving image using an identity transform.

identity = np.eye(4)
affine_map = AffineMap(identity, static.shape, static_grid2world, moving.shape,
                       moving_grid2world)
resampled = affine_map.transform(moving)
regtools.overlay_slices(static, resampled, None, 0, "Static", "Moving",
                        "resampled_0.png")
regtools.overlay_slices(static, resampled, None, 1, "Static", "Moving",
                        "resampled_1.png")
regtools.overlay_slices(static, resampled, None, 2, "Static", "Moving",
                        "resampled_2.png")
"""
.. figure:: resampled_0.png
   :align: center
.. figure:: resampled_1.png
   :align: center
.. figure:: resampled_2.png
   :align: center
Exemplo n.º 40
0
def quick_check():

    img1_fname = "/home/omar/data/DATA_NeoBrainS12/T1.nii.gz"
    img2_fname = "/home/omar/data/DATA_NeoBrainS12/set2_i1_t1.nii.gz"

    img1_nib = nib.load(img1_fname)
    img1 = img1_nib.get_data().squeeze()
    img1_affine = img1_nib.get_affine()

    img2_nib = nib.load(img2_fname)
    img2 = img2_nib.get_data().squeeze()
    img2_affine = img2_nib.get_affine()
    # nib.aff2axcodes(img1_affine)
    #aff = AffineMap(None, img1.shape, img1_affine, img2.shape, img2_affine)
    #aff = transform_centers_of_mass(img1, img1_affine, img2, img2_affine)
    aff = dipy_align(img1, img1_affine, img2, img2_affine, np.eye(4))

    img2_resampled = aff.transform(img2)
    rt.overlay_slices(img1, img2_resampled, slice_type=0)
    rt.overlay_slices(img1, img2_resampled, slice_type=1)
    rt.overlay_slices(img1, img2_resampled, slice_type=2)



    # Verify that original and RAS versions of neo1 describe the same object

    # Load original data
    neo1_fname = get_neobrain('train', 1, 'T1')
    neo1_old, neo1_old_affine, neo1_old_spacing, neo1_old_ori = load_from_raw(neo1_fname)

    # Load RAS version
    neo1_nib = nib.load(neo1_fname)
    neo1 = neo1_nib.get_data()
    neo1_affine = neo1_nib.get_affine()

    # Resample RAS on top of original
    aff = AffineMap(None, neo1_old.shape, neo1_old_affine, neo1.shape, neo1_affine)
    neo1_resampled = aff.transform(neo1)
    rt.overlay_slices(neo1_old, neo1_resampled, slice_type=0)
    rt.overlay_slices(neo1_old, neo1_resampled, slice_type=1)
    rt.overlay_slices(neo1_old, neo1_resampled, slice_type=2)


    # Attempt to resample a test volume on top of training
    neo2_fname = get_neobrain('test', 1, 'i1_t1')
    neo2_nib = nib.load(neo2_fname)
    neo2 = neo2_nib.get_data()
    neo2_affine = neo2_nib.get_affine()
    aff = transform_centers_of_mass(neo1, neo1_affine, neo2, neo2_affine)
    #aff = dipy_align(neo1, neo1_affine, neo2, neo2_affine)
    neo2_resampled = aff.transform(neo2)

    rt.overlay_slices(neo1, neo2_resampled, slice_type=0)
    rt.overlay_slices(neo1, neo2_resampled, slice_type=1)
    rt.overlay_slices(neo1, neo2_resampled, slice_type=2)



    # Load atlas
    atlas_fname = get_neobrain('atlas', 'neo-withSkull', None)
    atlas_nib = nib.load(atlas_fname)
    atlas_affine = atlas_nib.get_affine()
    atlas = atlas_nib.get_data()
    rt.plot_slices(atlas)

    # Resample atlas on top of neo1
    aff = AffineMap(None, neo1.shape, neo1_affine, atlas.shape, atlas_affine)
    atlas_resampled = aff.transform(atlas)
    rt.overlay_slices(neo1, atlas_resampled)