Example #1
def rescale_centroids(ctd_list, img, voxel_spacing=(1, 1, 1)):
    """rescale centroid coordinates to new spacing in current x-y-z-orientation
    
    Parameters:
    ----------
    ctd_list: list of centroids
    img: nibabel image 
    voxel_spacing: desired spacing
    
    Returns:
    ----------
    out_list: rescaled list of centroids 
    
    """
    ornt_img = nio.io_orientation(img.affine)
    ornt_ctd = nio.axcodes2ornt(ctd_list[0])
    if np.array_equal(ornt_img, ornt_ctd):
        zms = img.header.get_zooms()
    else:
        ornt_trans = nio.ornt_transform(ornt_img, ornt_ctd)
        aff_trans = nio.inv_ornt_aff(ornt_trans, img.dataobj.shape)
        new_aff = np.matmul(img.affine, aff_trans)
        zms = nib.affines.voxel_sizes(new_aff)
    ctd_arr = np.transpose(np.asarray(ctd_list[1:]))
    v_list = ctd_arr[0].astype(int).tolist()  # vertebral labels
    ctd_arr = ctd_arr[1:]
    ctd_arr[0] = np.around(ctd_arr[0] * zms[0] / voxel_spacing[0], decimals=1)
    ctd_arr[1] = np.around(ctd_arr[1] * zms[1] / voxel_spacing[1], decimals=1)
    ctd_arr[2] = np.around(ctd_arr[2] * zms[2] / voxel_spacing[2], decimals=1)
    out_list = [ctd_list[0]]
    ctd_list = np.transpose(ctd_arr).tolist()
    for v, ctd in zip(v_list, ctd_list):
        out_list.append([v] + ctd)
    print("[*] Rescaled centroid coordinates to spacing (x, y, z) =", voxel_spacing, "mm")
    return out_list
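A minimal usage sketch for the function above. It assumes the module-level imports the snippet relies on (numpy as np, nibabel as nib, nibabel.orientations as nio) and the centroid-list convention implied by the code: the first element is the axis-code tuple, the remaining elements are [label, x, y, z] in voxel coordinates. The file path and label values are placeholders.
import numpy as np
import nibabel as nib
import nibabel.orientations as nio

img = nib.load('ct.nii.gz')                  # placeholder path
ctd_list = [('P', 'I', 'R'),                 # orientation the coordinates refer to
            [20, 113.0, 204.5, 179.0],       # [vertebra label, x, y, z] in voxels
            [21, 115.5, 260.0, 180.5]]

# Express the centroid coordinates in 1 mm isotropic voxels
ctd_iso = rescale_centroids(ctd_list, img, voxel_spacing=(1, 1, 1))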
Example #2
def reorient_to(img, axcodes_to=('P', 'I', 'R'), verb=False):
    """Reorients the nifti from its original orientation to another specified orientation
    
    Parameters:
    ----------
    img: nibabel image
    axcodes_to: a tuple of 3 characters specifying the desired orientation
    
    Returns:
    ----------
    newimg: The reoriented nibabel image 
    
    """
    aff = img.affine
    arr = np.asanyarray(img.dataobj, dtype=img.dataobj.dtype)
    ornt_fr = nio.io_orientation(aff)
    ornt_to = nio.axcodes2ornt(axcodes_to)
    ornt_trans = nio.ornt_transform(ornt_fr, ornt_to)
    arr = nio.apply_orientation(arr, ornt_trans)
    aff_trans = nio.inv_ornt_aff(ornt_trans, arr.shape)
    newaff = np.matmul(aff, aff_trans)
    newimg = nib.Nifti1Image(arr, newaff)
    if verb:
        print("[*] Image reoriented from", nio.ornt2axcodes(ornt_fr), "to", axcodes_to)
    return newimg
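A usage sketch, assuming the snippet's module-level imports (numpy as np, nibabel as nib, nibabel.orientations as nio); the paths are placeholders.
import numpy as np
import nibabel as nib
import nibabel.orientations as nio

img = nib.load('ct.nii.gz')                              # placeholder path
img_pir = reorient_to(img, axcodes_to=('P', 'I', 'R'), verb=True)
nib.save(img_pir, 'ct_PIR.nii.gz')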
Example #3
def reorient_image_and_affine(img, aff):
    """ Reorient an image and and affine such that the affine is approx. diagonal
      and such that the elements on the main diagonal are positiv
  
  Parameters
  ----------
  img : 3D numpy array
    containing the image

  aff : 2D numpy array
    (4,4) affine transformation matrix from image to anatomical coordinate system in
    homogeneous coordinates

  Returns
  -------
  a tuple with the reoriented image and the accordingly transformed affine

  Note
  ----
  The reorientation uses nibabel's io_orientation() and apply_orientation()
  """
    ornt = nib.io_orientation(aff)
    img_t = nib.apply_orientation(img, ornt)
    aff_t = aff.dot(inv_ornt_aff(ornt, img.shape))

    return img_t, aff_t
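A usage sketch. The function body relies on inv_ornt_aff being importable at module level (as in the project it comes from), so that import is shown explicitly; the path is a placeholder.
import numpy as np
import nibabel as nib
from nibabel.orientations import inv_ornt_aff   # used inside reorient_image_and_affine

nii = nib.load('vol.nii.gz')                    # placeholder path
vol = np.asanyarray(nii.dataobj)
vol_r, aff_r = reorient_image_and_affine(vol, nii.affine)

# aff_r should now be close to diagonal with positive diagonal entries
print(np.round(aff_r, 2))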
Example #4
def _as_reoriented_backport(img, ornt):
    """Backport of img.as_reoriented as of nibabel 2.2.0"""
    import numpy as np
    import nibabel as nb
    from nibabel.orientations import inv_ornt_aff
    if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
        return img

    t_arr = nb.apply_orientation(img.get_data(), ornt)
    new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape))
    reoriented = img.__class__(t_arr, new_aff, img.header)

    if isinstance(reoriented, nb.Nifti1Pair):
        # Also apply the transform to the dim_info fields
        new_dim = list(reoriented.header.get_dim_info())
        for idx, value in enumerate(new_dim):
            # For each value, leave as None if it was that way,
            # otherwise check where we have mapped it to
            if value is None:
                continue
            new_dim[idx] = np.where(ornt[:, 0] == idx)[0]

        reoriented.header.set_dim_info(*new_dim)

    return reoriented
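A sketch of how the backport is typically driven: build the orientation transform with nibabel's helpers and pass it in. Note that the backport calls img.get_data(), which only exists in older nibabel releases (it was removed in 5.0); on nibabel >= 2.2 one would call img.as_reoriented(ornt) directly. The path is a placeholder.
import nibabel as nb
from nibabel.orientations import axcodes2ornt, io_orientation, ornt_transform

img = nb.load('in.nii.gz')                      # placeholder path
ornt = ornt_transform(io_orientation(img.affine),
                      axcodes2ornt(('R', 'A', 'S')))
ras_img = _as_reoriented_backport(img, ornt)    # or img.as_reoriented(ornt) on nibabel >= 2.2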
Example #5
    def _run_interface(self, runtime):
        import numpy as np
        import nibabel as nb
        from nibabel.orientations import (axcodes2ornt, ornt_transform,
                                          inv_ornt_aff)

        fname = self.inputs.in_file
        orig_img = nb.load(fname)

        # Find transform from current (approximate) orientation to
        # target, in nibabel orientation matrix and affine forms
        orig_ornt = nb.io_orientation(orig_img.affine)
        targ_ornt = axcodes2ornt(self.inputs.orientation)
        transform = ornt_transform(orig_ornt, targ_ornt)
        affine_xfm = inv_ornt_aff(transform, orig_img.shape)

        # Check can be eliminated when minimum nibabel version >= 2.2
        if hasattr(orig_img, 'as_reoriented'):
            reoriented = orig_img.as_reoriented(transform)
        else:
            reoriented = _as_reoriented_backport(orig_img, transform)

        # Image may be reoriented
        if reoriented is not orig_img:
            suffix = '_' + self.inputs.orientation.lower()
            out_name = fname_presuffix(fname,
                                       suffix=suffix,
                                       newpath=runtime.cwd)
            reoriented.to_filename(out_name)
        else:
            out_name = fname

        mat_name = fname_presuffix(fname,
                                   suffix='.mat',
                                   newpath=runtime.cwd,
                                   use_ext=False)
        np.savetxt(mat_name, affine_xfm, fmt='%.08f')

        self._results['out_file'] = out_name
        self._results['transform'] = mat_name

        return runtime
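The affine_xfm written to the .mat file is the voxel-to-voxel mapping from the reoriented grid back to the original grid, so composing the original affine with it reproduces the reoriented image's affine. A small standalone check along those lines (independent of the interface; placeholder path; requires nibabel >= 2.2 for as_reoriented):
import numpy as np
import nibabel as nb
from nibabel.orientations import axcodes2ornt, ornt_transform, inv_ornt_aff

img = nb.load('epi.nii.gz')                     # placeholder path
transform = ornt_transform(nb.io_orientation(img.affine),
                           axcodes2ornt('LPS'))
affine_xfm = inv_ornt_aff(transform, img.shape)

# World coordinates are preserved: the reoriented affine is the original
# affine composed with the voxel-index remapping.
reoriented = img.as_reoriented(transform)
assert np.allclose(reoriented.affine, img.affine.dot(affine_xfm))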
Example #6
File: image.py Project: nipy/nipype
def _as_reoriented_backport(img, ornt):
    """Backport of img.as_reoriented as of nibabel 2.4.0"""
    import numpy as np
    import nibabel as nb
    from nibabel.orientations import inv_ornt_aff
    if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
        return img

    t_arr = nb.apply_orientation(img.get_data(), ornt)
    new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape))
    reoriented = img.__class__(t_arr, new_aff, img.header)

    if isinstance(reoriented, nb.Nifti1Pair):
        # Also apply the transform to the dim_info fields
        new_dim = [None if orig_dim is None else int(ornt[orig_dim, 0])
                   for orig_dim in img.header.get_dim_info()]

        reoriented.header.set_dim_info(*new_dim)

    return reoriented
Example #7
def _as_reoriented_backport(img, ornt):
    """Backport of img.as_reoriented as of nibabel 2.4.0"""
    import numpy as np
    import nibabel as nb
    from nibabel.orientations import inv_ornt_aff
    if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
        return img

    t_arr = nb.apply_orientation(img.get_data(), ornt)
    new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape))
    reoriented = img.__class__(t_arr, new_aff, img.header)

    if isinstance(reoriented, nb.Nifti1Pair):
        # Also apply the transform to the dim_info fields
        new_dim = [
            None if orig_dim is None else int(ornt[orig_dim, 0])
            for orig_dim in img.header.get_dim_info()
        ]

        reoriented.header.set_dim_info(*new_dim)

    return reoriented
Example #8
File: image.py Project: nipy/nipype
    def _run_interface(self, runtime):
        import numpy as np
        import nibabel as nb
        from nibabel.orientations import (
            axcodes2ornt, ornt_transform, inv_ornt_aff)

        fname = self.inputs.in_file
        orig_img = nb.load(fname)

        # Find transform from current (approximate) orientation to
        # target, in nibabel orientation matrix and affine forms
        orig_ornt = nb.io_orientation(orig_img.affine)
        targ_ornt = axcodes2ornt(self.inputs.orientation)
        transform = ornt_transform(orig_ornt, targ_ornt)
        affine_xfm = inv_ornt_aff(transform, orig_img.shape)

        # Check can be eliminated when minimum nibabel version >= 2.4
        if LooseVersion(nb.__version__) >= LooseVersion('2.4.0'):
            reoriented = orig_img.as_reoriented(transform)
        else:
            reoriented = _as_reoriented_backport(orig_img, transform)

        # Image may be reoriented
        if reoriented is not orig_img:
            suffix = '_' + self.inputs.orientation.lower()
            out_name = fname_presuffix(fname, suffix=suffix,
                                       newpath=runtime.cwd)
            reoriented.to_filename(out_name)
        else:
            out_name = fname

        mat_name = fname_presuffix(fname, suffix='.mat',
                                   newpath=runtime.cwd, use_ext=False)
        np.savetxt(mat_name, affine_xfm, fmt='%.08f')

        self._results['out_file'] = out_name
        self._results['transform'] = mat_name

        return runtime
Example #9
def _as_reoriented_backport(img, ornt):
    """Backport of img.as_reoriented as of nibabel 2.2.0"""
    from nibabel.orientations import inv_ornt_aff
    if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]):
        return img

    t_arr = nb.apply_orientation(img.get_data(), ornt)
    new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape))
    reoriented = img.__class__(t_arr, new_aff, img.header)

    if isinstance(reoriented, nb.Nifti1Pair):
        # Also apply the transform to the dim_info fields
        new_dim = list(reoriented.header.get_dim_info())
        for idx, value in enumerate(new_dim):
            # For each value, leave as None if it was that way,
            # otherwise check where we have mapped it to
            if value is None:
                continue
            new_dim[idx] = np.where(ornt[:, 0] == idx)[0]

        reoriented.header.set_dim_info(*new_dim)

    return reoriented
Example #10
def reorder_voxels(vox_array, affine, voxel_order):
    '''Reorder the given voxel array and corresponding affine.

    Parameters
    ----------
    vox_array : array
        The array of voxel data

    affine : array
        The affine for mapping voxel indices to Nifti patient space

    voxel_order : str
        A three character code specifying the desired ending point for rows,
        columns, and slices in terms of the orthogonal axes of patient space:
        (l)eft, (r)ight, (a)nterior, (p)osterior, (s)uperior, and (i)nferior.

    Returns
    -------
    out_vox : array
        An updated view of vox_array.

    out_aff : array
        A new array with the updated affine

    reorient_transform : array
        The transform used to update the affine.

    ornt_trans : tuple
        The orientation transform used to update the orientation.

    '''
    # Check if voxel_order is valid
    voxel_order = voxel_order.upper()
    if len(voxel_order) != 3:
        raise ValueError('The voxel_order must contain three characters')
    dcm_axes = ['LR', 'AP', 'SI']
    for char in voxel_order:
        if char not in 'LRAPSI':
            raise ValueError('The characters in voxel_order must be one '
                             'of: L,R,A,P,I,S')
        for idx, axis in enumerate(dcm_axes):
            if char in axis:
                del dcm_axes[idx]
    if len(dcm_axes) != 0:
        raise ValueError('No character in voxel_order corresponding to '
                         'axes: %s' % dcm_axes)

    # Check the vox_array and affine have correct shape/size
    if len(vox_array.shape) < 3:
        raise ValueError('The vox_array must be at least three dimensional')
    if affine.shape != (4, 4):
        raise ValueError('The affine must be 4x4')

    # Pull the current index directions from the affine
    orig_ornt = io_orientation(affine)
    new_ornt = axcodes2ornt(voxel_order)
    ornt_trans = ornt_transform(orig_ornt, new_ornt)
    orig_shape = vox_array.shape
    vox_array = apply_orientation(vox_array, ornt_trans)
    aff_trans = inv_ornt_aff(ornt_trans, orig_shape)
    affine = np.dot(affine, aff_trans)

    return (vox_array, affine, aff_trans, ornt_trans)
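A usage sketch. The function depends on numpy plus the nibabel.orientations helpers it calls, so those imports are listed; the path and the 'LAS' target order are placeholders.
import numpy as np
import nibabel as nib
from nibabel.orientations import (io_orientation, axcodes2ornt, ornt_transform,
                                  apply_orientation, inv_ornt_aff)

nii = nib.load('dwi.nii.gz')                    # placeholder path
vox, aff, reorient_xfm, ornt_trans = reorder_voxels(
    np.asanyarray(nii.dataobj), nii.affine, 'LAS')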
Example #11
def proc_file(infile, opts):
    # figure out the output filename, and see if it exists
    basefilename = splitext_addext(os.path.basename(infile))[0]
    if opts.outdir is not None:
        # set output path
        basefilename = os.path.join(opts.outdir, basefilename)

    # prep a file
    if opts.compressed:
        verbose('Using gzip compression')
        outfilename = basefilename + '.nii.gz'
    else:
        outfilename = basefilename + '.nii'
    if os.path.isfile(outfilename) and not opts.overwrite:
        raise IOError('Output file "%s" exists, use --overwrite to '
                      'overwrite it' % outfilename)

    # load the PAR header and data
    scaling = 'dv' if opts.scaling == 'off' else opts.scaling
    infile = fname_ext_ul_case(infile)
    pr_img = pr.load(infile,
                     permit_truncated=opts.permit_truncated,
                     scaling=scaling,
                     strict_sort=opts.strict_sort)
    pr_hdr = pr_img.header
    affine = pr_hdr.get_affine(origin=opts.origin)
    slope, intercept = pr_hdr.get_data_scaling(scaling)
    if opts.scaling != 'off':
        verbose('Using data scaling "%s"' % opts.scaling)
    # get original scaling, and decide if we scale in-place or not
    if opts.scaling == 'off':
        slope = np.array([1.])
        intercept = np.array([0.])
        in_data = pr_img.dataobj.get_unscaled()
        out_dtype = pr_hdr.get_data_dtype()
    elif not np.any(np.diff(slope)) and not np.any(np.diff(intercept)):
        # Single scalefactor case
        slope = slope.ravel()[0]
        intercept = intercept.ravel()[0]
        in_data = pr_img.dataobj.get_unscaled()
        out_dtype = pr_hdr.get_data_dtype()
    else:
        # Multi scalefactor case
        slope = np.array([1.])
        intercept = np.array([0.])
        in_data = np.array(pr_img.dataobj)
        out_dtype = np.float64
    # Reorient data block to LAS+ if necessary
    ornt = io_orientation(np.diag([-1, 1, 1, 1]).dot(affine))
    if np.all(ornt == [[0, 1], [1, 1], [2, 1]]):  # already in LAS+
        t_aff = np.eye(4)
    else:  # Not in LAS+
        t_aff = inv_ornt_aff(ornt, pr_img.shape)
        affine = np.dot(affine, t_aff)
        in_data = apply_orientation(in_data, ornt)

    bvals, bvecs = pr_hdr.get_bvals_bvecs()
    if not opts.keep_trace:  # discard Philips DTI trace if present
        if bvecs is not None:
            bad_mask = np.logical_and(bvals != 0, (bvecs == 0).all(axis=1))
            if bad_mask.sum() > 0:
                pl = 's' if bad_mask.sum() != 1 else ''
                verbose('Removing %s DTI trace volume%s' %
                        (bad_mask.sum(), pl))
                good_mask = ~bad_mask
                in_data = in_data[..., good_mask]
                bvals = bvals[good_mask]
                bvecs = bvecs[good_mask]

    # Make corresponding NIfTI image
    nimg = nifti1.Nifti1Image(in_data, affine, pr_hdr)
    nhdr = nimg.header
    nhdr.set_data_dtype(out_dtype)
    nhdr.set_slope_inter(slope, intercept)
    nhdr.set_sform(affine, code=1)
    nhdr.set_qform(affine, code=1)

    if 'parse' in opts.minmax:
        # need to get the scaled data
        verbose('Loading (and scaling) the data to determine value range')
    if opts.minmax[0] == 'parse':
        nhdr['cal_min'] = in_data.min() * slope + intercept
    else:
        nhdr['cal_min'] = float(opts.minmax[0])
    if opts.minmax[1] == 'parse':
        nhdr['cal_max'] = in_data.max() * slope + intercept
    else:
        nhdr['cal_max'] = float(opts.minmax[1])

    # container for potential NIfTI1 header extensions
    if opts.store_header:
        # dump the full PAR header content into an extension
        with open(infile, 'rb') as fobj:  # contents must be bytes
            hdr_dump = fobj.read()
            dump_ext = nifti1.Nifti1Extension('comment', hdr_dump)
        nhdr.extensions.append(dump_ext)

    verbose('Writing %s' % outfilename)
    nibabel.save(nimg, outfilename)

    # write out bvals/bvecs if requested
    if opts.bvs:
        if bvals is None and bvecs is None:
            verbose('No DTI volumes detected, bvals and bvecs not written')
        elif bvecs is None:
            verbose('DTI volumes detected, but no diffusion direction info was '
                    'found.  Writing .bvals file only.')
            with open(basefilename + '.bvals', 'w') as fid:
                # np.savetxt could do this, but it's just a loop anyway
                for val in bvals:
                    fid.write('%s ' % val)
                fid.write('\n')
        else:
            verbose('Writing .bvals and .bvecs files')
            # Transform bvecs with reorientation affine
            orig2new = npl.inv(t_aff)
            bv_reorient = from_matvec(to_matvec(orig2new)[0], [0, 0, 0])
            bvecs = apply_affine(bv_reorient, bvecs)
            with open(basefilename + '.bvals', 'w') as fid:
                # np.savetxt could do this, but it's just a loop anyway
                for val in bvals:
                    fid.write('%s ' % val)
                fid.write('\n')
            with open(basefilename + '.bvecs', 'w') as fid:
                for row in bvecs.T:
                    for val in row:
                        fid.write('%s ' % val)
                    fid.write('\n')

    # export data labels varying along the 4th dimensions if requested
    if opts.vol_info:
        labels = pr_img.header.get_volume_labels()
        if len(labels) > 0:
            vol_keys = list(labels.keys())
            with open(basefilename + '.ordering.csv', 'w') as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=',')
                csvwriter.writerow(vol_keys)
                for vals in zip(*[labels[k] for k in vol_keys]):
                    csvwriter.writerow(vals)

    # write out dwell time if requested
    if opts.dwell_time:
        try:
            dwell_time = calculate_dwell_time(pr_hdr.get_water_fat_shift(),
                                              pr_hdr.get_echo_train_length(),
                                              opts.field_strength)
        except MRIError:
            verbose('No EPI factors, dwell time not written')
        else:
            verbose('Writing dwell time (%r sec) calculated assuming %sT '
                    'magnet' % (dwell_time, opts.field_strength))
            with open(basefilename + '.dwell_time', 'w') as fid:
                fid.write('%r\n' % dwell_time)
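The LAS+ reorientation block in the middle of proc_file can be exercised on its own; below is a minimal sketch with a made-up array and affine, using only numpy and nibabel.orientations.
import numpy as np
from nibabel.orientations import io_orientation, apply_orientation, inv_ornt_aff

data = np.random.rand(4, 5, 6)                  # hypothetical RAS+ volume
affine = np.diag([2.0, 2.0, 2.0, 1.0])

# Same trick as above: flip the world x-axis, then ask for the orientation
ornt = io_orientation(np.diag([-1, 1, 1, 1]).dot(affine))
if not np.all(ornt == [[0, 1], [1, 1], [2, 1]]):         # not already LAS+
    affine = affine.dot(inv_ornt_aff(ornt, data.shape))  # uses the *original* shape
    data = apply_orientation(data, ornt)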
Example #12
def test_resample_from_to():
    # Test resampling from image to image / image space
    data = np.arange(24).reshape((2, 3, 4))
    affine = np.diag([-4, 5, 6, 1])
    img = Nifti1Image(data, affine)
    img.header['descrip'] = 'red shirt image'
    out = resample_from_to(img, img)
    assert_almost_equal(img.dataobj, out.dataobj)
    assert_array_equal(img.affine, out.affine)
    # Check resampling reverses effect of flipping axes
    # This will also test translations
    flip_ornt = np.array([[0, 1], [1, 1], [2, 1]])
    for axis in (0, 1, 2):
        ax_flip_ornt = flip_ornt.copy()
        ax_flip_ornt[axis, 1] = -1
        aff_flip_i = inv_ornt_aff(ax_flip_ornt, (2, 3, 4))
        flipped_img = Nifti1Image(flip_axis(data, axis),
                                  np.dot(affine, aff_flip_i))
        out = resample_from_to(flipped_img, ((2, 3, 4), affine))
        assert_almost_equal(img.dataobj, out.dataobj)
        assert_array_equal(img.affine, out.affine)
    # A translation of one voxel on each axis
    trans_aff = from_matvec(np.diag([-4, 5, 6]), [4, -5, -6])
    trans_img = Nifti1Image(data, trans_aff)
    out = resample_from_to(trans_img, img)
    exp_out = np.zeros_like(data)
    exp_out[:-1, :-1, :-1] = data[1:, 1:, 1:]
    assert_almost_equal(out.dataobj, exp_out)
    out = resample_from_to(img, trans_img)
    trans_exp_out = np.zeros_like(data)
    trans_exp_out[1:, 1:, 1:] = data[:-1, :-1, :-1]
    assert_almost_equal(out.dataobj, trans_exp_out)
    # Test mode with translation of first axis only
    # Default 'constant' mode first
    trans1_aff = from_matvec(np.diag([-4, 5, 6]), [4, 0, 0])
    trans1_img = Nifti1Image(data, trans1_aff)
    out = resample_from_to(img, trans1_img)
    exp_out = np.zeros_like(data)
    exp_out[1:, :, :] = data[:-1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    # Then 'nearest' mode
    out = resample_from_to(img, trans1_img, mode='nearest')
    exp_out[0, :, :] = exp_out[1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    # Test order
    trans_p_25_aff = from_matvec(np.diag([-4, 5, 6]), [1, 0, 0])
    trans_p_25_img = Nifti1Image(data, trans_p_25_aff)
    # Surprising to me, but all points outside are set to 0, even with NN
    out = resample_from_to(img, trans_p_25_img, order=0)
    exp_out = np.zeros_like(data)
    exp_out[1:, :, :] = data[1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    out = resample_from_to(img, trans_p_25_img)
    exp_out = spnd.affine_transform(data, [1, 1, 1], [-0.25, 0, 0], order=3)
    assert_almost_equal(out.dataobj, exp_out)
    # Test cval
    out = resample_from_to(img, trans_img, cval=99)
    exp_out = np.zeros_like(data) + 99
    exp_out[1:, 1:, 1:] = data[:-1, :-1, :-1]
    assert_almost_equal(out.dataobj, exp_out)
    # Out class
    out = resample_from_to(img, trans_img)
    assert out.__class__ == Nifti1Image
    # By default, type of from_img makes no difference
    n1_img = Nifti2Image(data, affine)
    out = resample_from_to(n1_img, trans_img)
    assert out.__class__ == Nifti1Image
    # Passed as keyword arg
    out = resample_from_to(img, trans_img, out_class=Nifti2Image)
    assert out.__class__ == Nifti2Image
    # If keyword arg is None, use type of from_img
    out = resample_from_to(n1_img, trans_img, out_class=None)
    assert out.__class__ == Nifti2Image
    # to_img type irrelevant in all cases
    n1_trans_img = Nifti2Image(data, trans_aff)
    out = resample_from_to(img, n1_trans_img, out_class=None)
    assert out.__class__ == Nifti1Image
    # From 2D to 3D, error, the fixed affine is not invertible
    img_2d = Nifti1Image(data[:, :, 0], affine)
    with pytest.raises(AffineError):
        resample_from_to(img_2d, img)
    # 3D to 2D, we don't need to invert the fixed matrix
    out = resample_from_to(img, img_2d)
    assert_array_equal(out.dataobj, data[:, :, 0])
    # Same for tuple as to_img input
    out = resample_from_to(img, (img_2d.shape, img_2d.affine))
    assert_array_equal(out.dataobj, data[:, :, 0])
    # 4D input and output also OK
    data_4d = np.arange(24 * 5).reshape((2, 3, 4, 5))
    img_4d = Nifti1Image(data_4d, affine)
    out = resample_from_to(img_4d, img_4d)
    assert_almost_equal(data_4d, out.dataobj)
    assert_array_equal(img_4d.affine, out.affine)
    # Errors trying to match 3D to 4D
    with pytest.raises(ValueError):
        resample_from_to(img_4d, img)
    with pytest.raises(ValueError):
        resample_from_to(img, img_4d)
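The flip construction in the test works because inv_ornt_aff returns the voxel-index mapping from the flipped grid back to the original grid, so composing it with the original affine keeps world coordinates fixed. A small standalone check (shapes chosen to match the test, but independent of it):
import numpy as np
from nibabel.affines import apply_affine
from nibabel.orientations import inv_ornt_aff

shape = (2, 3, 4)
affine = np.diag([-4.0, 5.0, 6.0, 1.0])
flip0 = np.array([[0, -1], [1, 1], [2, 1]])      # flip the first axis
flipped_affine = affine.dot(inv_ornt_aff(flip0, shape))

# Voxel (i, j, k) of the original and voxel (shape[0]-1-i, j, k) of the
# flipped array land on the same world coordinate.
ijk = np.array([1, 2, 3])
ijk_flipped = np.array([shape[0] - 1 - ijk[0], ijk[1], ijk[2]])
assert np.allclose(apply_affine(affine, ijk),
                   apply_affine(flipped_affine, ijk_flipped))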
Example #13
def test_resample_from_to():
    # Test resampling from image to image / image space
    data = np.arange(24).reshape((2, 3, 4))
    affine = np.diag([-4, 5, 6, 1])
    img = Nifti1Image(data, affine)
    img.header['descrip'] = 'red shirt image'
    out = resample_from_to(img, img)
    assert_almost_equal(img.dataobj, out.dataobj)
    assert_array_equal(img.affine, out.affine)
    # Check resampling reverses effect of flipping axes
    # This will also test translations
    flip_ornt = np.array([[0, 1], [1, 1], [2, 1]])
    for axis in (0, 1, 2):
        ax_flip_ornt = flip_ornt.copy()
        ax_flip_ornt[axis, 1] = -1
        aff_flip_i = inv_ornt_aff(ax_flip_ornt, (2, 3, 4))
        flipped_img = Nifti1Image(flip_axis(data, axis),
                                  np.dot(affine, aff_flip_i))
        out = resample_from_to(flipped_img, ((2, 3, 4), affine))
        assert_almost_equal(img.dataobj, out.dataobj)
        assert_array_equal(img.affine, out.affine)
    # A translation of one voxel on each axis
    trans_aff = from_matvec(np.diag([-4, 5, 6]), [4, -5, -6])
    trans_img = Nifti1Image(data, trans_aff)
    out = resample_from_to(trans_img, img)
    exp_out = np.zeros_like(data)
    exp_out[:-1, :-1, :-1] = data[1:, 1:, 1:]
    assert_almost_equal(out.dataobj, exp_out)
    out = resample_from_to(img, trans_img)
    trans_exp_out = np.zeros_like(data)
    trans_exp_out[1:, 1:, 1:] = data[:-1, :-1, :-1]
    assert_almost_equal(out.dataobj, trans_exp_out)
    # Test mode with translation of first axis only
    # Default 'constant' mode first
    trans1_aff = from_matvec(np.diag([-4, 5, 6]), [4, 0, 0])
    trans1_img = Nifti1Image(data, trans1_aff)
    out = resample_from_to(img, trans1_img)
    exp_out = np.zeros_like(data)
    exp_out[1:, :, :] = data[:-1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    # Then 'nearest' mode
    out = resample_from_to(img, trans1_img, mode='nearest')
    exp_out[0, :, :] = exp_out[1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    # Test order
    trans_p_25_aff = from_matvec(np.diag([-4, 5, 6]), [1, 0, 0])
    trans_p_25_img = Nifti1Image(data, trans_p_25_aff)
    # Surprising to me, but all points outside are set to 0, even with NN
    out = resample_from_to(img, trans_p_25_img, order=0)
    exp_out = np.zeros_like(data)
    exp_out[1:, :, :] = data[1, :, :]
    assert_almost_equal(out.dataobj, exp_out)
    out = resample_from_to(img, trans_p_25_img)
    exp_out = spnd.affine_transform(data, [1, 1, 1], [-0.25, 0, 0], order=3)
    assert_almost_equal(out.dataobj, exp_out)
    # Test cval
    out = resample_from_to(img, trans_img, cval=99)
    exp_out = np.zeros_like(data) + 99
    exp_out[1:, 1:, 1:] = data[:-1, :-1, :-1]
    assert_almost_equal(out.dataobj, exp_out)
    # Out class
    out = resample_from_to(img, trans_img)
    assert_equal(out.__class__, Nifti1Image)
    # By default, type of from_img makes no difference
    n1_img = Nifti2Image(data, affine)
    out = resample_from_to(n1_img, trans_img)
    assert_equal(out.__class__, Nifti1Image)
    # Passed as keyword arg
    out = resample_from_to(img, trans_img, out_class=Nifti2Image)
    assert_equal(out.__class__, Nifti2Image)
    # If keyword arg is None, use type of from_img
    out = resample_from_to(n1_img, trans_img, out_class=None)
    assert_equal(out.__class__, Nifti2Image)
    # to_img type irrelevant in all cases
    n1_trans_img = Nifti2Image(data, trans_aff)
    out = resample_from_to(img, n1_trans_img, out_class=None)
    assert_equal(out.__class__, Nifti1Image)
    # From 2D to 3D, error, the fixed affine is not invertible
    img_2d = Nifti1Image(data[:, :, 0], affine)
    assert_raises(AffineError, resample_from_to, img_2d, img)
    # 3D to 2D, we don't need to invert the fixed matrix
    out = resample_from_to(img, img_2d)
    assert_array_equal(out.dataobj, data[:, :, 0])
    # Same for tuple as to_img input
    out = resample_from_to(img, (img_2d.shape, img_2d.affine))
    assert_array_equal(out.dataobj, data[:, :, 0])
    # 4D input and output also OK
    data_4d = np.arange(24 * 5).reshape((2, 3, 4, 5))
    img_4d = Nifti1Image(data_4d, affine)
    out = resample_from_to(img_4d, img_4d)
    assert_almost_equal(data_4d, out.dataobj)
    assert_array_equal(img_4d.affine, out.affine)
    # Errors trying to match 3D to 4D
    assert_raises(ValueError, resample_from_to, img_4d, img)
    assert_raises(ValueError, resample_from_to, img, img_4d)
Example #14
def proc_file(infile, opts):
    # figure out the output filename, and see if it exists
    basefilename = splitext_addext(os.path.basename(infile))[0]
    if opts.outdir is not None:
        # set output path
        basefilename = os.path.join(opts.outdir, basefilename)

    # prep a file
    if opts.compressed:
        verbose("Using gzip compression")
        outfilename = basefilename + ".nii.gz"
    else:
        outfilename = basefilename + ".nii"
    if os.path.isfile(outfilename) and not opts.overwrite:
        raise IOError('Output file "%s" exists, use --overwrite to ' "overwrite it" % outfilename)

    # load the PAR header and data
    scaling = "dv" if opts.scaling == "off" else opts.scaling
    infile = fname_ext_ul_case(infile)
    pr_img = pr.load(infile, permit_truncated=opts.permit_truncated, scaling=scaling, strict_sort=opts.strict_sort)
    pr_hdr = pr_img.header
    affine = pr_hdr.get_affine(origin=opts.origin)
    slope, intercept = pr_hdr.get_data_scaling(scaling)
    if opts.scaling != "off":
        verbose('Using data scaling "%s"' % opts.scaling)
    # get original scaling, and decide if we scale in-place or not
    if opts.scaling == "off":
        slope = np.array([1.0])
        intercept = np.array([0.0])
        in_data = pr_img.dataobj.get_unscaled()
        out_dtype = pr_hdr.get_data_dtype()
    elif not np.any(np.diff(slope)) and not np.any(np.diff(intercept)):
        # Single scalefactor case
        slope = slope.ravel()[0]
        intercept = intercept.ravel()[0]
        in_data = pr_img.dataobj.get_unscaled()
        out_dtype = pr_hdr.get_data_dtype()
    else:
        # Multi scalefactor case
        slope = np.array([1.0])
        intercept = np.array([0.0])
        in_data = np.array(pr_img.dataobj)
        out_dtype = np.float64
    # Reorient data block to LAS+ if necessary
    ornt = io_orientation(np.diag([-1, 1, 1, 1]).dot(affine))
    if np.all(ornt == [[0, 1], [1, 1], [2, 1]]):  # already in LAS+
        t_aff = np.eye(4)
    else:  # Not in LAS+
        t_aff = inv_ornt_aff(ornt, pr_img.shape)
        affine = np.dot(affine, t_aff)
        in_data = apply_orientation(in_data, ornt)

    bvals, bvecs = pr_hdr.get_bvals_bvecs()
    if not opts.keep_trace:  # discard Philips DTI trace if present
        if bvecs is not None:
            bad_mask = np.logical_and(bvals != 0, (bvecs == 0).all(axis=1))
            if bad_mask.sum() > 0:
                pl = "s" if bad_mask.sum() != 1 else ""
                verbose("Removing %s DTI trace volume%s" % (bad_mask.sum(), pl))
                good_mask = ~bad_mask
                in_data = in_data[..., good_mask]
                bvals = bvals[good_mask]
                bvecs = bvecs[good_mask]

    # Make corresponding NIfTI image
    nimg = nifti1.Nifti1Image(in_data, affine, pr_hdr)
    nhdr = nimg.header
    nhdr.set_data_dtype(out_dtype)
    nhdr.set_slope_inter(slope, intercept)
    nhdr.set_sform(affine, code=1)
    nhdr.set_qform(affine, code=1)

    if "parse" in opts.minmax:
        # need to get the scaled data
        verbose("Loading (and scaling) the data to determine value range")
    if opts.minmax[0] == "parse":
        nhdr["cal_min"] = in_data.min() * slope + intercept
    else:
        nhdr["cal_min"] = float(opts.minmax[0])
    if opts.minmax[1] == "parse":
        nhdr["cal_max"] = in_data.max() * slope + intercept
    else:
        nhdr["cal_max"] = float(opts.minmax[1])

    # container for potential NIfTI1 header extensions
    if opts.store_header:
        # dump the full PAR header content into an extension
        with open(infile, "rb") as fobj:  # contents must be bytes
            hdr_dump = fobj.read()
            dump_ext = nifti1.Nifti1Extension("comment", hdr_dump)
        nhdr.extensions.append(dump_ext)

    verbose("Writing %s" % outfilename)
    nibabel.save(nimg, outfilename)

    # write out bvals/bvecs if requested
    if opts.bvs:
        if bvals is None and bvecs is None:
            verbose("No DTI volumes detected, bvals and bvecs not written")
        elif bvecs is None:
            verbose("DTI volumes detected, but no diffusion direction info was" "found.  Writing .bvals file only.")
            with open(basefilename + ".bvals", "w") as fid:
                # np.savetxt could do this, but it's just a loop anyway
                for val in bvals:
                    fid.write("%s " % val)
                fid.write("\n")
        else:
            verbose("Writing .bvals and .bvecs files")
            # Transform bvecs with reorientation affine
            orig2new = npl.inv(t_aff)
            bv_reorient = from_matvec(to_matvec(orig2new)[0], [0, 0, 0])
            bvecs = apply_affine(bv_reorient, bvecs)
            with open(basefilename + ".bvals", "w") as fid:
                # np.savetxt could do this, but it's just a loop anyway
                for val in bvals:
                    fid.write("%s " % val)
                fid.write("\n")
            with open(basefilename + ".bvecs", "w") as fid:
                for row in bvecs.T:
                    for val in row:
                        fid.write("%s " % val)
                    fid.write("\n")

    # export data labels varying along the 4th dimensions if requested
    if opts.vol_info:
        labels = pr_img.header.get_volume_labels()
        if len(labels) > 0:
            vol_keys = list(labels.keys())
            with open(basefilename + ".ordering.csv", "w") as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=",")
                csvwriter.writerow(vol_keys)
                for vals in zip(*[labels[k] for k in vol_keys]):
                    csvwriter.writerow(vals)

    # write out dwell time if requested
    if opts.dwell_time:
        try:
            dwell_time = calculate_dwell_time(
                pr_hdr.get_water_fat_shift(), pr_hdr.get_echo_train_length(), opts.field_strength
            )
        except MRIError:
            verbose("No EPI factors, dwell time not written")
        else:
            verbose("Writing dwell time (%r sec) calculated assuming %sT " "magnet" % (dwell_time, opts.field_strength))
            with open(basefilename + ".dwell_time", "w") as fid:
                fid.write("%r\n" % dwell_time)