Example #1
def test_img_data_dtype():
    # Ignoring complex, binary, 128+ bit, RGBA
    nifti1_dtypes = (np.uint8, np.uint16, np.uint32, np.uint64, np.int8,
                     np.int16, np.int32, np.int64, np.float32, np.float64)
    dtype_matches = []
    with InTemporaryDirectory():
        for logical_dtype in nifti1_dtypes:
            dataobj = np.random.uniform(0, 255,
                                        size=(2, 2, 2)).astype(logical_dtype)
            for on_disk_dtype in nifti1_dtypes:
                img = Nifti1Image(dataobj, np.eye(4))
                img.set_data_dtype(on_disk_dtype)
                img.to_filename('test.nii')
                loaded = nb.load('test.nii')
                # To verify later that sometimes these differ meaningfully
                dtype_matches.append(
                    loaded.get_data_dtype() == niimg.img_data_dtype(loaded))
                assert (np.array(
                    loaded.dataobj).dtype == niimg.img_data_dtype(loaded))
    # Verify that the distinction is worth making
    assert any(dtype_matches)
    assert not all(dtype_matches)
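The test above hinges on the fact that an image's header ("on-disk") dtype and the dtype of the array actually backing the image can disagree. A minimal nibabel-only sketch of that distinction (independent of the nilearn helpers used above):

import numpy as np
from nibabel import Nifti1Image

data = np.arange(8, dtype=np.float64).reshape(2, 2, 2)
img = Nifti1Image(data, np.eye(4))
img.set_data_dtype(np.int16)             # dtype used when writing to disk
print(img.get_data_dtype())              # int16 (header dtype)
print(np.asanyarray(img.dataobj).dtype)  # float64 (in-memory dtype)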
Example #2
 def rescale(self, nbins=255, numproc=2, overwrite=True):
     """Method to convert the brain images from MRI signals to a given pixel value scale
     
     :param nbins: {int} integer representing the number of bins generated.
     :param numproc: {int} number of parallel processes applied to rescale.
     :param overwrite: {bool} whether the original image in the attribute :py:attr:`img` should be overwritten.
     :return:
     """
     print("Rescaling pixel intensity range...")
     # create the pool before the try block so the finally clause below
     # never references an undefined name if Pool() itself fails
     p = Pool(numproc)  # number of parallel processes
     try:
         self.data = np.array(
             p.map(partial(_rescale, nbins, np.max(self.img.dataobj)),
                   self.img.dataobj.T.astype('float64'))).T
     finally:
         p.close()
         p.join()
     if overwrite:
         self.img = Nifti1Image(self.data, self.img.affine)
         self.data = None
     self.img.uncache()
     print("\tRescaled!")
Example #3
def test_iterator_generator():
    # Create a list of random images
    l = [
        Nifti1Image(np.random.random((10, 10, 10)), np.eye(4))
        for i in range(10)
    ]
    cc = _utils.concat_niimgs(l)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], l[0].get_data())

    # Same with iteration
    i = image.iter_img(l)
    cc = _utils.concat_niimgs(i)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], l[0].get_data())

    # Now, a generator
    b = []
    g = nifti_generator(b)
    cc = _utils.concat_niimgs(g)
    assert_equal(cc.shape[-1], 10)
    assert_equal(len(b), 10)
Example #4
    def combine_brains(self, slices=None):
        """Method to combine all brains in the loaded filename dictionary into a big data object with the individual
        brain data in the 4th dimension.

        :param slices: {int} number of slices to load from each brain (testing purpose). The average from ±1 slice
            is loaded for every slice.
        :return: data of all files loaded into the attribute :py:attr:`data`
        """
        print("Loading %i files..." % len(self.names))
        self.img = load_img(self.filenames)
        if slices:
            step = int(float(self.img.shape[2]) / float(slices + 1))
            newshape = list(self.img.shape)
            newshape[2] = slices
            imgarr = np.empty(shape=tuple(newshape))
            for s, img in enumerate(self.img.dataobj.T):
                for i in range(1, slices + 1):
                    imgarr[..., i - 1, s] = np.mean(img.T[..., (i * step - 1):(i * step + 1)], axis=2)
            self.img = Nifti1Image(imgarr.reshape((self.img.shape[0], self.img.shape[1], slices, len(self.filenames))),
                                   self.img.affine)
        self.img.uncache()
        print("\tAll files loaded!")
Example #5
def apply_mask(niimgs, mask_img, dtype=np.float32,
               smoothing_fwhm=None, ensure_finite=True):
    """Extract signals from images using specified mask.

    Read the time series from the given nifti images or filepaths,
    using the mask.

    Parameters
    -----------
    niimgs: list of 4D nifti images
        Images to be masked. Lists of lists of 3D images are also accepted.

    mask_img: niimg
        3D mask array: True where a voxel should be used.

    smoothing_fwhm: float
        (optional) Gives the size of the spatial smoothing to apply to
        the signal, in voxels. Implies ensure_finite=True.

    ensure_finite: bool
        If ensure_finite is True (default), the non-finite values (NaNs and
        infs) found in the images will be replaced by zeros.

    Returns
    --------
    session_series: numpy.ndarray
        2D array of series with shape (image number, voxel number)

    Notes
    -----
    When using smoothing, ensure_finite is set to True, as non-finite
    values would spread across the image.
    """
    mask, mask_affine = _load_mask_img(mask_img)
    mask_img = Nifti1Image(_utils.as_ndarray(mask, dtype=np.int8),
                           mask_affine)
    return _apply_mask_fmri(niimgs, mask_img, dtype=dtype,
                            smoothing_fwhm=smoothing_fwhm,
                            ensure_finite=ensure_finite)
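A hypothetical usage sketch for the function above. The public counterpart in nilearn is nilearn.masking.apply_mask, which takes the same arguments; the shapes in the comments assume the small images built here:

import numpy as np
from nibabel import Nifti1Image
from nilearn.masking import apply_mask

rng = np.random.RandomState(0)
func_img = Nifti1Image(rng.random_sample((4, 4, 4, 5)), np.eye(4))  # 5 timepoints
mask = np.zeros((4, 4, 4), dtype=np.int8)
mask[1:3, 1:3, 1:3] = 1                                             # 8 voxels in the mask
mask_img = Nifti1Image(mask, np.eye(4))

series = apply_mask(func_img, mask_img)
print(series.shape)  # (5, 8): (image number, voxel number)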
Example #6
def test_first_level_with_scaling():
    shapes, rk = [(3, 1, 1, 2)], 1
    fmri_data = list()
    fmri_data.append(Nifti1Image(np.zeros((1, 1, 1, 2)) + 6, np.eye(4)))
    design_matrices = list()
    design_matrices.append(
        pd.DataFrame(np.ones((shapes[0][-1], rk)),
                     columns=list('abcdefghijklmnopqrstuvwxyz')[:rk]))
    fmri_glm = FirstLevelModel(mask_img=False,
                               noise_model='ols',
                               signal_scaling=0,
                               minimize_memory=True)
    assert fmri_glm.signal_scaling == 0
    assert not fmri_glm.standardize
    with pytest.warns(DeprecationWarning,
                      match="Deprecated. `scaling_axis` will be removed"):
        assert fmri_glm.scaling_axis == 0
    glm_parameters = fmri_glm.get_params()
    test_glm = FirstLevelModel(**glm_parameters)
    fmri_glm = fmri_glm.fit(fmri_data, design_matrices=design_matrices)
    test_glm = test_glm.fit(fmri_data, design_matrices=design_matrices)
    assert glm_parameters['signal_scaling'] == 0
Example #7
def test_get_cut_slices():

    # Generate simple simulated data with one "spot"
    img, data = _simulate_img()

    # Use automatic selection of coordinates
    cut_slices = bp._get_cut_slices(img, cut_coords=None, threshold=None)
    assert (cut_slices == [4, 4, 4]).all()

    # Check that using a single number for cut_coords raises an error
    with pytest.raises(ValueError):
        bp._get_cut_slices(img, cut_coords=4, threshold=None)

    # Check that it is possible to manually specify coordinates
    cut_slices = bp._get_cut_slices(img, cut_coords=[2, 2, 2], threshold=None)
    assert (cut_slices == [2, 2, 2]).all()

    # Check that the affine does not change where the cut is done
    affine = 2 * np.eye(4)
    img = Nifti1Image(data, affine)
    cut_slices = bp._get_cut_slices(img, cut_coords=None, threshold=None)
    assert (cut_slices == [4, 4, 4]).all()
Example #8
def run_preprocessing(image_file, output_file):
    # Load and crop
    img = check_niimg(image_file, ensure_ndim=4)
    img_data = img.get_data()
    img_data = img_data[:, :, :, 17:]  # Initial scanner setup
    img_data = img_data[:, :, :, 27:]  # Starting cartoon
    img_data = img_data[:, :, :, :975]
    print('Data matrix shape: ' + str(img_data.shape))

    img_data = np.reshape(img_data, (245245, img_data.shape[3]))

    # Normalize data
    Y = zscore(img_data).T

    # Detrend data
    Y = detrend_data(Y)
    print('Detrended data matrix: ' + str(Y.shape))

    Y = np.reshape(Y.T, (65, 77, 49, 975))

    r_squared_img = Nifti1Image(Y, affine=img.affine)
    r_squared_img.to_filename(output_file)
Example #9
def test_compute_brain_mask():
    # Check masker for template masking strategy

    img = np.random.rand(9, 9, 5)
    img = Nifti1Image(img, np.eye(4))

    masker = NiftiMasker(mask_strategy='template')

    masker.fit(img)
    mask1 = masker.mask_img_

    masker2 = NiftiMasker(mask_strategy='template',
                          mask_args=dict(threshold=0.))

    masker2.fit(img)
    mask2 = masker2.mask_img_

    mask_ref = np.zeros((9, 9, 5))
    mask_ref[2:7, 2:7, 2] = 1

    np.testing.assert_array_equal(get_data(mask1), mask_ref)
    np.testing.assert_array_equal(get_data(mask2), mask_ref)
Example #10
def test_raises_bbox_error_if_data_outside_box():
    # Make some cases which should raise exceptions

    # original image
    data = np.zeros([8, 9, 10])
    affine = np.eye(4)
    affine_offset = np.array([1, 1, 1])
    affine[:3, 3] = affine_offset

    img = Nifti1Image(data, affine)

    # some axis flipping affines
    # wrap map() in list() so np.array builds a proper (6, 4, 4) array on Python 3
    axis_flips = np.array(
        list(map(np.diag, [[-1, 1, 1, 1], [1, -1, 1, 1], [1, 1, -1, 1],
                           [-1, -1, 1, 1], [-1, 1, -1, 1], [1, -1, -1, 1]])))

    # some in-plane 90 degree rotations based on these
    # (by permuting two lines)
    af = axis_flips
    rotations = np.array([
        af[0][[1, 0, 2, 3]], af[0][[2, 1, 0, 3]], af[1][[1, 0, 2, 3]],
        af[1][[0, 2, 1, 3]], af[2][[2, 1, 0, 3]], af[2][[0, 2, 1, 3]]
    ])

    new_affines = np.concatenate([axis_flips, rotations])
    new_offset = np.array([0., 0., 0.])
    new_affines[:, :3, 3] = new_offset[np.newaxis, :]

    for new_affine in new_affines:
        exception = BoundingBoxError
        message = ("The field of view given "
                   "by the target affine does "
                   "not contain any of the data")

        testing.assert_raises_regexp(exception,
                                     message,
                                     resample_img,
                                     img,
                                     target_affine=new_affine)
Example #11
def unmask(X, mask_img, order="F"):
    """Take masked data and bring them back into 3D/4D

    This function can be applied to a list of masked data.

    Parameters
    ==========
    X: numpy.ndarray (or list of)
        Masked data. shape: (samples #, features #).
        If X is one-dimensional, it is assumed that samples# == 1.
    mask_img: nifti-like image
        Mask. Must be 3-dimensional.

    Returns
    =======
    data: nifti-like image (or list of)
        Unmasked data. Depending on the shape of X, data can have
        different shapes:

        - X.ndim == 2:
          Shape: (mask.shape[0], mask.shape[1], mask.shape[2], X.shape[0])
        - X.ndim == 1:
          Shape: (mask.shape[0], mask.shape[1], mask.shape[2])
    """

    if isinstance(X, list):
        ret = []
        for x in X:
            ret.append(unmask(x, mask_img, order=order))  # 1-level recursion
        return ret

    mask, affine = _load_mask_img(mask_img)

    if X.ndim == 2:
        unmasked = _unmask_nd(X, mask, order=order)
    elif X.ndim == 1:
        unmasked = _unmask_3d(X, mask, order=order)
    else:
        raise TypeError("Masked data X must be 1D or 2D, got a %dD array" % X.ndim)
    return Nifti1Image(unmasked, affine)
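A small round-trip sketch, assuming the public nilearn equivalents (nilearn.masking.unmask and nilearn.masking.apply_mask) of the functions documented above:

import numpy as np
from nibabel import Nifti1Image
from nilearn.masking import apply_mask, unmask

mask = np.zeros((5, 5, 5), dtype=np.int8)
mask[1:4, 1:4, 1:4] = 1                       # 27 voxels inside the mask
mask_img = Nifti1Image(mask, np.eye(4))

signals = np.random.RandomState(0).random_sample((3, 27))  # (samples #, features #)
img_4d = unmask(signals, mask_img)            # Nifti1Image of shape (5, 5, 5, 3)
recovered = apply_mask(img_4d, mask_img)      # back to a (3, 27) array
assert np.allclose(recovered, signals)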
Example #12
    def save(self, type=''):
        """
        Write an image in a nifti file
        :param type:    if not set, the image is saved in the same type as input data
                        if 'minimize', the image space is minimized; available NIfTI types are:
                        (2, 'uint8', np.uint8, "NIFTI_TYPE_UINT8"),
                        (4, 'int16', np.int16, "NIFTI_TYPE_INT16"),
                        (8, 'int32', np.int32, "NIFTI_TYPE_INT32"),
                        (16, 'float32', np.float32, "NIFTI_TYPE_FLOAT32"),
                        (32, 'complex64', np.complex64, "NIFTI_TYPE_COMPLEX64"),
                        (64, 'float64', np.float64, "NIFTI_TYPE_FLOAT64"),
                        (256, 'int8', np.int8, "NIFTI_TYPE_INT8"),
                        (512, 'uint16', np.uint16, "NIFTI_TYPE_UINT16"),
                        (768, 'uint32', np.uint32, "NIFTI_TYPE_UINT32"),
                        (1024,'int64', np.int64, "NIFTI_TYPE_INT64"),
                        (1280, 'uint64', np.uint64, "NIFTI_TYPE_UINT64"),
                        (1536, 'float128', _float128t, "NIFTI_TYPE_FLOAT128"),
                        (1792, 'complex128', np.complex128, "NIFTI_TYPE_COMPLEX128"),
                        (2048, 'complex256', _complex256t, "NIFTI_TYPE_COMPLEX256"),
        """
        from nibabel import Nifti1Image, save
        from sct_utils import printv
        if type != '':
            self.changeType(type)
        if self.hdr:
            self.hdr.set_data_shape(self.data.shape)
        img = Nifti1Image(self.data, None, self.hdr)
        #printv('saving ' + self.path + self.file_name + self.ext + '\n', self.verbose)

        from os import path, remove
        fname_out = self.path + self.file_name + self.ext
        if path.isfile(fname_out):
            printv(
                'WARNING: File ' + fname_out + ' already exists. Deleting it.',
                1, 'warning')
            remove(fname_out)
        # save file
        save(img, fname_out)
Example #13
def savenifti(arr, filename, niftiobj=None, affine=None, header=None):
    '''
    savenifti(arr, filename, niftiobj=None, affine=None, header=None):

    Save a 3D or 4D <arr> to the full nifti filename <filename>, using
    the affine and header information from <niftiobj>.

    Note that the file name can end in either '.nii' or '.nii.gz'; nibabel
    takes care of the compression.

    If <affine> or <header> is supplied, they will overwrite information from the <niftiobj>

    20180622 RZ add support for affine and header

    '''
    from nibabel import save, Nifti1Image
    from numpy import ndarray
    from RZutilpy.system import makedirs, Path

    # check input
    assert isinstance(
        arr,
        ndarray) and (3 <= arr.ndim <= 4), 'Please input a 3d or 4d ndarray!'

    # make the dir if it does not exist
    # use `not` rather than the bitwise `~`, which is always truthy on a bool
    filename = Path(filename) if not isinstance(filename, Path) else filename
    makedirs(filename)  # note here we add a os.sep

    if affine is None or header is None:
        assert isinstance(
            niftiobj,
            Nifti1Image), 'affine or header is none, you must supply niftiobj'

    # get affine and header information
    affine = niftiobj.affine.copy() if affine is None else affine.copy()
    header = niftiobj.header.copy() if header is None else header.copy()

    save(Nifti1Image(arr, affine, header), filename.str)
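For reference, a minimal nibabel-only equivalent of the save performed above; the directory handling and input checks provided by RZutilpy are omitted here:

import numpy as np
from nibabel import Nifti1Image, save

arr = np.zeros((4, 4, 4), dtype=np.float32)
save(Nifti1Image(arr, np.eye(4)), 'example_out.nii.gz')  # '.nii' would also work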
Example #14
def generate_warping_field(fname_dest,
                           warp_x,
                           warp_y,
                           fname_warp='warping_field.nii.gz',
                           verbose=1):
    """
    Generate an ITK warping field
    :param fname_dest:
    :param warp_x:
    :param warp_y:
    :param fname_warp:
    :param verbose:
    :return:
    """
    sct.printv('\nGenerate warping field...', verbose)

    # Get image dimensions
    # sct.printv('Get destination dimension', verbose)
    nx, ny, nz, nt, px, py, pz, pt = Image(fname_dest).dim
    # sct.printv('  matrix size: '+str(nx)+' x '+str(ny)+' x '+str(nz), verbose)
    # sct.printv('  voxel size:  '+str(px)+'mm x '+str(py)+'mm x '+str(pz)+'mm', verbose)

    # initialize
    data_warp = np.zeros((nx, ny, nz, 1, 3))

    # fill matrix
    data_warp[:, :, :, 0, 0] = -warp_x  # need to invert due to ITK conventions
    data_warp[:, :, :, 0, 1] = -warp_y  # need to invert due to ITK conventions

    # save warping field
    im_dest = load(fname_dest)
    hdr_dest = im_dest.get_header()
    hdr_warp = hdr_dest.copy()
    hdr_warp.set_intent('vector', (), '')
    hdr_warp.set_data_dtype('float32')
    img = Nifti1Image(data_warp, None, hdr_warp)
    save(img, fname_warp)
    sct.printv(' --> ' + fname_warp, verbose)
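A nibabel-only sketch of the header manipulation used above (setting the vector intent and float32 dtype on a 5D displacement field), independent of the SCT Image and printv helpers; the shape here is arbitrary:

import numpy as np
from nibabel import Nifti1Image, save

data_warp = np.zeros((4, 4, 4, 1, 3), dtype=np.float32)   # x, y, z, t, vector component
hdr = Nifti1Image(data_warp, np.eye(4)).header.copy()
hdr.set_intent('vector', (), '')       # mark the last axis as a vector, as ITK expects
hdr.set_data_dtype('float32')
img = Nifti1Image(data_warp, None, hdr)  # affine taken from the header
save(img, 'warping_field.nii.gz')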
Example #15
def test_plot_stat_map_threshold_for_affine_with_rotation():
    """Tests for plot_stat_map with thresholding and resampling.

    Threshold was not being applied when affine has a rotation.
    See https://github.com/nilearn/nilearn/issues/599 for more details.
    """
    rng = np.random.RandomState(42)
    data = rng.standard_normal(size=(10, 10, 10))
    # matrix with rotation
    affine = np.array([[-3., 1., 0., 1.],
                       [-1., -3., 0., -2.],
                       [0., 0., 3., 3.],
                       [0., 0., 0., 1.]])
    img = Nifti1Image(data, affine)
    display = plot_stat_map(img, bg_img=None, threshold=1.,
                            display_mode='z', cut_coords=1)
    # Next two lines retrieve the numpy array from the plot
    ax = list(display.axes.values())[0].ax
    plotted_array = ax.images[0].get_array()
    # Given the high threshold the array should be partly masked
    assert plotted_array.mask.any()
    # Save execution time and memory
    plt.close()
Example #16
def mri_nan2zero(input_nii):
    """Remove NaN values and turn them into zeros (so that Freesurfer can handle
    them)

    Parameters
    ----------
    input_nii : Path
        path to nii.gz containing NaN

    Returns
    -------
    Path
        path to temporary nii.gz containing no NaN. You can remove the file
        with .unlink()
    """
    img = nload(str(input_nii))
    dat = img.get_data()
    dat[isnan(dat)] = 0
    img = Nifti1Image(dat, img.affine)

    tmp_nii = mkstemp(suffix='.nii.gz')[1]
    img.to_filename(tmp_nii)
    return Path(tmp_nii)
Example #17
def test_empty_report():
    # Data for NiftiMasker
    data = np.zeros((9, 9, 9))
    data[3:-3, 3:-3, 3:-3] = 10
    data_img_3d = Nifti1Image(data, np.eye(4))
    # Data for NiftiLabelsMasker
    shape = (13, 11, 12)
    affine = np.diag([2, 2, 2, 1])
    n_regions = 9
    labels_img = data_gen.generate_labeled_regions(shape,
                                                   affine=affine,
                                                   n_regions=n_regions)
    # turn off reporting
    maskers = [input_data.NiftiMasker(reports=False),
               input_data.NiftiLabelsMasker(labels_img, reports=False)]
    for masker in maskers:
        masker.fit(data_img_3d)
        assert masker._reporting_data is None
        assert masker._reporting() == [None]
        with pytest.warns(UserWarning,
                          match=("Report generation not enabled ! "
                                 "No visual outputs will be created.")):
            masker.generate_report()
Example #18
def test_3x3_affine_bbox():
    # Test that the bounding-box is properly computed when
    # transforming with a negative affine component
    # This is specifically to test for a change in behavior between
    # scipy < 0.18 and scipy >= 0.18, which is an interaction between
    # offset and a diagonal affine
    image = np.ones((20, 30))
    source_affine = np.eye(4)
    # Give the affine an offset
    source_affine[:2, 3] = np.array([96, 64])

    # We need to turn this data into a nibabel image
    img = Nifti1Image(image[:, :, np.newaxis], affine=source_affine)

    target_affine_3x3 = np.eye(3) * 2
    # One negative axis
    target_affine_3x3[1] *= -1

    img_3d_affine = resample_img(img, target_affine=target_affine_3x3)

    # If the bounding box is computed wrong, the image will be only
    # zeros
    np.testing.assert_allclose(img_3d_affine.get_data().max(), image.max())
Example #19
def test_resample_img_segmentation_fault():
    if os.environ.get('APPVEYOR') == 'True':
        raise SkipTest('This test is too slow (7-8 minutes) on AppVeyor')

    # see https://github.com/nilearn/nilearn/issues/346
    shape_in = (64, 64, 64)
    aff_in = np.diag([2., 2., 2., 1.])
    aff_out = np.diag([3., 3., 3., 1.])
    # fourth_dim = 1024 works fine but for 1025 creates a segmentation
    # fault with scipy < 0.14.1
    fourth_dim = 1025

    try:
        data = np.ones(shape_in + (fourth_dim, ), dtype=np.float64)
    except MemoryError:
        # This can happen on AppVeyor and for 32-bit Python on Windows
        raise SkipTest('Not enough RAM to run this test')

    img_in = Nifti1Image(data, aff_in)

    resample_img(img_in,
                 target_affine=aff_out,
                 interpolation='nearest')
Example #20
def test_iterator_generator():
    # Create a list of random images
    rng = np.random.RandomState(42)
    list_images = [
        Nifti1Image(rng.random_sample((10, 10, 10)), np.eye(4))
        for i in range(10)
    ]
    cc = _utils.concat_niimgs(list_images)
    assert cc.shape[-1] == 10
    assert_array_almost_equal(get_data(cc)[..., 0], get_data(list_images[0]))

    # Same with iteration
    i = image.iter_img(list_images)
    cc = _utils.concat_niimgs(i)
    assert cc.shape[-1] == 10
    assert_array_almost_equal(get_data(cc)[..., 0], get_data(list_images[0]))

    # Now, a generator
    b = []
    g = nifti_generator(b)
    cc = _utils.concat_niimgs(g)
    assert cc.shape[-1] == 10
    assert len(b) == 10
Example #21
def run_analysis(image_file, event_file, output_file, mask_file=None):
    # Load functional data
    img = check_niimg(image_file, ensure_ndim=4)
    img_data = img.get_data()
    print('Data matrix shape: ' + str(img_data.shape))

    # Apply mask if provided
    if mask_file:
        mask = check_niimg(mask_file, ensure_ndim=3).get_data().astype(bool)
        img_data = img_data[mask]
        print('Masked data matrix shape: ' + str(img_data.shape))
    else:
        img_data = np.reshape(img_data, (245245, img_data.shape[3]))

    # Get design matrix from nistats
    dm = get_design_matrix(event_file, img_data.shape[1])
    print('Design matrix shape: ' + str(dm.shape))

    # Normalize design matrix (data normalized in preprocessing)
    X = zscore(dm.as_matrix().T).T
    if PLOT:
        plt.plot(X)
        plt.show()

    # Fit and compute R squareds
    weights, r_squared = compute_rsquared(X, img_data.T)
    print('R squared matrix shape: ' + str(r_squared.shape))

    # Output results
    if mask_file:
        output = np.zeros((65, 77, 49))
        output[mask] = r_squared
    else:
        output = np.reshape(r_squared, (65, 77, 49))
        output[output == 1.0] = 0.0
    r_squared_img = Nifti1Image(output, affine=img.affine)
    r_squared_img.to_filename(output_file)
Example #22
def decfa(img_orig):
    """
    Create a nifti-compliant directional-encoded color FA file.

    Parameters
    ----------
    img_orig : Nifti1Image class instance.
        Contains encoding of the DEC FA image with a 4D volume of data, where
        the elements on the last dimension represent R, G and B components.

    Returns
    -------
    img : Nifti1Image class instance.


    Notes
    -----
    For a description of this format, see:

    https://nifti.nimh.nih.gov/nifti-1/documentation/nifti1fields/nifti1fields_pages/datatype.html
    """

    dest_dtype = np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
    out_data = np.zeros(img_orig.shape[:3], dtype=dest_dtype)

    data_orig = img_orig.get_data()

    for ii in np.ndindex(img_orig.shape[:3]):
        val = data_orig[ii]
        out_data[ii] = (val[0], val[1], val[2])

    new_hdr = img_orig.get_header()
    new_hdr['dim'][4] = 1
    new_hdr.set_intent(1001, name='Color FA')
    new_hdr.set_data_dtype(dest_dtype)

    return Nifti1Image(out_data, affine=img_orig.affine, header=new_hdr)
Example #23
def decfa_to_float(img_orig):
    """
    Convert a nifti-compliant directional-encoded color FA image into a
    nifti image with RGB encoded in floating point resolution.

    Parameters
    ----------
    img_orig : Nifti1Image class instance.
        Contains encoding of the DEC FA image with a 3D volume of data, where
        each element is a (R, G, B) tuple in uint8.

    Returns
    -------
    img : Nifti1Image class instance with float dtype.

    Notes
    -----
    For a description of this format, see:

    https://nifti.nimh.nih.gov/nifti-1/documentation/nifti1fields/nifti1fields_pages/datatype.html
    """

    data_orig = np.asanyarray(img_orig.dataobj)
    out_data = np.zeros(data_orig.shape + (3, ), dtype=np.uint8)

    for ii in np.ndindex(img_orig.shape[:3]):
        val = data_orig[ii]
        out_data[ii] = np.array([val[0], val[1], val[2]])

    new_hdr = img_orig.header
    new_hdr['dim'][4] = 3

    # Remove the original intent
    new_hdr.set_intent(0)
    new_hdr.set_data_dtype(np.float64)  # np.float was an alias for float64, removed in recent NumPy

    return Nifti1Image(out_data, affine=img_orig.affine, header=new_hdr)
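A small round-trip sketch using the two functions above, assuming both are in scope and that the nibabel version in use still provides the get_data()/get_header() accessors they rely on:

import numpy as np
from nibabel import Nifti1Image

rng = np.random.RandomState(0)
rgb = (rng.random_sample((5, 5, 5, 3)) * 255).astype(np.uint8)
img_rgb = Nifti1Image(rgb, np.eye(4))

img_dec = decfa(img_rgb)             # voxels packed into the (R, G, B) structured dtype
img_back = decfa_to_float(img_dec)   # unpacked again into a trailing length-3 axis
print(img_back.shape)                # (5, 5, 5, 3)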
Example #24
def test_compute_multi_gray_matter_mask():
    pytest.raises(TypeError, compute_multi_gray_matter_mask, [])

    # Check error raised if images with different shapes are given as input
    imgs = [Nifti1Image(np.ones((9, 9, 9)), np.eye(4)),
            Nifti1Image(np.ones((9, 9, 8)), np.eye(4))]
    pytest.raises(ValueError, compute_multi_gray_matter_mask, imgs)

    # Check results are the same if affine is the same
    imgs1 = [Nifti1Image(np.random.randn(9, 9, 9), np.eye(4)),
             Nifti1Image(np.random.randn(9, 9, 9), np.eye(4))]
    mask1 = compute_multi_gray_matter_mask(imgs1)

    imgs2 = [Nifti1Image(np.random.randn(9, 9, 9), np.eye(4)),
             Nifti1Image(np.random.randn(9, 9, 9), np.eye(4))]
    mask2 = compute_multi_gray_matter_mask(imgs2)

    assert_array_equal(get_data(mask1), get_data(mask2))
Example #25
def generate_maps(shape,
                  n_regions,
                  overlap=0,
                  border=1,
                  window="boxcar",
                  rand_gen=None,
                  affine=np.eye(4)):
    """Generate a 4D volume containing several maps.
    Parameters
    ==========
    n_regions: int
        number of regions to generate

    overlap: int
        approximate number of voxels common to two neighboring regions

    window: str
        name of a window in scipy.signal. Used to get non-uniform regions.

    border: int
        number of background voxels on each side of the 3D volumes.

    Returns
    =======
    maps: nibabel.Nifti1Image
        4D array, containing maps.
    """

    mask = np.zeros(shape, dtype=np.int8)
    mask[border:-border, border:-border, border:-border] = 1
    ts = generate_regions_ts(mask.sum(),
                             n_regions,
                             overlap=overlap,
                             rand_gen=rand_gen,
                             window=window)
    mask_img = Nifti1Image(mask, affine)
    return masking.unmask(ts, mask_img), mask_img
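A hypothetical usage of the helper above, assuming it and its dependencies (generate_regions_ts, masking.unmask) are importable and that generate_regions_ts returns one row per region, as in nilearn's data-generation utilities:

import numpy as np

maps_img, mask_img = generate_maps(shape=(10, 10, 10), n_regions=4,
                                   rand_gen=np.random.RandomState(0))
print(maps_img.shape)  # (10, 10, 10, 4): one 3D map per region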
Example #26
 def exportNifti(self, event):
     """Export labels in the image browser as a nifti file."""
     print("  Exporting nifti file...")
     # put the permuted indices back to their original format
     cycBackPerm = (self.cycleCount, (self.cycleCount + 1) % 3,
                    (self.cycleCount + 2) % 3)
     # assign unique integers (for ncut labels)
     out_volHistMask = np.copy(self.volHistMask)
     labels = np.unique(self.volHistMask)
     intLabels = [i for i in range(labels.size)]
     for label, newLabel in zip(labels, intLabels):
         out_volHistMask[out_volHistMask == label] = intLabels[newLabel]
     # get 3D brain mask
     volume_image = np.transpose(self.invHistVolume, cycBackPerm)
     if cfg.discard_zeros:
         zmask = volume_image != 0
         temp_labeled_image = map_2D_hist_to_ima(volume_image[zmask],
                                                 out_volHistMask)
         out_nii = np.zeros(volume_image.shape)
         out_nii[zmask] = temp_labeled_image  # put back flat labels
     else:
         out_nii = map_2D_hist_to_ima(volume_image.flatten(),
                                      out_volHistMask)
         out_nii = out_nii.reshape(volume_image.shape)
     # save mask image as nii
     new_image = Nifti1Image(out_nii,
                             header=self.nii.get_header(),
                             affine=self.nii.get_affine())
     # get new flex file name and check for overwriting
     labels_out = '{}_labels_{}.nii.gz'.format(self.basename,
                                               self.nrExports)
     while os.path.isfile(labels_out):
         self.nrExports += 1
         labels_out = '{}_labels_{}.nii.gz'.format(self.basename,
                                                   self.nrExports)
     save(new_image, labels_out)
     print(f"    Saved as: {labels_out}")
Example #27
def test_resample_clip():
    # Resample an image and get larger and smaller
    # values than in the original. Use clip to get rid of these values

    shape = (6, 3, 6)
    data = np.zeros(shape=shape)
    data[1:-2, 1:-1, 1:-2] = 1

    source_affine = np.diag((2, 2, 2, 1))
    source_img = Nifti1Image(data, source_affine)

    target_affine = np.eye(4)
    no_clip_data = resample_img(source_img, target_affine,
                                clip=False).get_data()
    clip_data = resample_img(source_img, target_affine, clip=True).get_data()

    not_clip = np.where((no_clip_data > data.min())
                        & (no_clip_data < data.max()))

    assert_true(np.any(no_clip_data > data.max()))
    assert_true(np.any(no_clip_data < data.min()))
    assert_true(np.all(clip_data <= data.max()))
    assert_true(np.all(clip_data >= data.min()))
    assert_array_equal(no_clip_data[not_clip], clip_data[not_clip])
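The call exercised above is nilearn.image.resample_img; a minimal sketch of the clip behaviour, assuming a nilearn version that exposes the clip keyword:

import numpy as np
from nibabel import Nifti1Image
from nilearn.image import resample_img

data = np.zeros((6, 3, 6))
data[1:-2, 1:-1, 1:-2] = 1
img = Nifti1Image(data, np.diag((2, 2, 2, 1)))

resampled = resample_img(img, target_affine=np.eye(4), clip=True)
# with clip=True, spline interpolation cannot overshoot the original value range
assert np.asanyarray(resampled.dataobj).max() <= data.max()
assert np.asanyarray(resampled.dataobj).min() >= data.min()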
Example #28
bg_img = \
    nibabel.load('/git/pymri/examples/MNI152_T1_2mm_brain.nii.gz').get_data()
# X.shape is (91,109, 91, 216)
# and mask.shape is (91, 109, 91)

affine = np.array([[-2., 0., 0., 90.], [0., 2., 0., -126.], [0., 0., 2., -72.],
                   [0., 0., 0., 1.]])

usage_print()

# ### Masking step
from pymri.utils import masking, signal
from nibabel import Nifti1Image

# Mask data
X0_img = Nifti1Image(X0, affine)
X0 = masking.apply_mask(X0_img, mask, smoothing_fwhm=4)
X1_img = Nifti1Image(X1, affine)
X1 = masking.apply_mask(X1_img, mask, smoothing_fwhm=4)

# # Standardize data
# from sklearn import preprocessing
# for sample in range(len(X0)):
# X0[sample] = preprocessing.scale(X0[sample])
# for sample in range(len(X1)):
# X1[sample] = preprocessing.scale(X1[sample])

X = np.zeros(shape=(
    (len(X0) + len(X1)),
    X0.shape[-1],
))
Example #29
    def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None):
        """
        Core routine for detecting outliers
        """
        if not cwd:
            cwd = os.getcwd()

        # read in functional image
        if isinstance(imgfile, (str, bytes)):
            nim = load(imgfile, mmap=NUMPY_MMAP)
        elif isinstance(imgfile, list):
            if len(imgfile) == 1:
                nim = load(imgfile[0], mmap=NUMPY_MMAP)
            else:
                images = [load(f, mmap=NUMPY_MMAP) for f in imgfile]
                nim = funcs.concat_images(images)

        # compute global intensity signal
        (x, y, z, timepoints) = nim.shape

        data = nim.get_data()
        affine = nim.affine
        g = np.zeros((timepoints, 1))
        masktype = self.inputs.mask_type
        if masktype == 'spm_global':  # spm_global like calculation
            iflogger.debug('art: using spm global')
            intersect_mask = self.inputs.intersect_mask
            if intersect_mask:
                mask = np.ones((x, y, z), dtype=bool)
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    # Use an SPM like approach
                    mask_tmp = vol > \
                        (np.nanmean(vol) / self.inputs.global_threshold)
                    mask = mask * mask_tmp
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    g[t0] = np.nanmean(vol[mask])
                if len(find_indices(mask)) < (np.prod((x, y, z)) / 10):
                    intersect_mask = False
                    g = np.zeros((timepoints, 1))
            if not intersect_mask:
                iflogger.info('not intersect_mask is True')
                mask = np.zeros((x, y, z, timepoints))
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    mask_tmp = vol > \
                        (np.nanmean(vol) / self.inputs.global_threshold)
                    mask[:, :, :, t0] = mask_tmp
                    g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp)
        elif masktype == 'file':  # uses a mask image to determine intensity
            maskimg = load(self.inputs.mask_file, mmap=NUMPY_MMAP)
            mask = maskimg.get_data()
            affine = maskimg.affine
            mask = mask > 0.5
            for t0 in range(timepoints):
                vol = data[:, :, :, t0]
                g[t0] = np.nanmean(vol[mask])
        elif masktype == 'thresh':  # uses a fixed signal threshold
            for t0 in range(timepoints):
                vol = data[:, :, :, t0]
                mask = vol > self.inputs.mask_threshold
                g[t0] = np.nanmean(vol[mask])
        else:
            mask = np.ones((x, y, z))
            g = np.nanmean(data[mask > 0, :], 1)

        # compute normalized intensity values
        gz = signal.detrend(g, axis=0)  # detrend the signal
        if self.inputs.use_differences[1]:
            gz = np.concatenate((np.zeros((1, 1)), np.diff(gz, n=1, axis=0)),
                                axis=0)
        gz = (gz - np.mean(gz)) / np.std(gz)  # normalize the detrended signal
        iidx = find_indices(abs(gz) > self.inputs.zintensity_threshold)

        # read in motion parameters
        mc_in = np.loadtxt(motionfile)
        mc = deepcopy(mc_in)

        (artifactfile, intensityfile, statsfile, normfile, plotfile,
         displacementfile,
         maskfile) = self._get_output_filenames(imgfile, cwd)
        mask_img = Nifti1Image(mask.astype(np.uint8), affine)
        mask_img.to_filename(maskfile)

        if self.inputs.use_norm:
            brain_pts = None
            if self.inputs.bound_by_brainmask:
                voxel_coords = np.nonzero(mask)
                coords = np.vstack((voxel_coords[0],
                                    np.vstack(
                                        (voxel_coords[1], voxel_coords[2])))).T
                brain_pts = np.dot(
                    affine,
                    np.hstack((coords, np.ones((coords.shape[0], 1)))).T)
            # calculate the norm of the motion parameters
            normval, displacement = _calc_norm(mc,
                                               self.inputs.use_differences[0],
                                               self.inputs.parameter_source,
                                               brain_pts=brain_pts)
            tidx = find_indices(normval > self.inputs.norm_threshold)
            ridx = find_indices(normval < 0)
            if displacement is not None:
                dmap = np.zeros((x, y, z, timepoints), dtype=np.float64)  # np.float alias removed in recent NumPy
                for i in range(timepoints):
                    dmap[voxel_coords[0], voxel_coords[1], voxel_coords[2],
                         i] = displacement[i, :]
                dimg = Nifti1Image(dmap, affine)
                dimg.to_filename(displacementfile)
        else:
            if self.inputs.use_differences[0]:
                mc = np.concatenate((np.zeros(
                    (1, 6)), np.diff(mc_in, n=1, axis=0)),
                                    axis=0)
            traval = mc[:, 0:3]  # translation parameters (mm)
            rotval = mc[:, 3:6]  # rotation parameters (rad)
            tidx = find_indices(
                np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0)
            ridx = find_indices(
                np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0)

        outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx)))

        # write output to outputfile
        np.savetxt(artifactfile, outliers, fmt=b'%d', delimiter=' ')
        np.savetxt(intensityfile, g, fmt=b'%.2f', delimiter=' ')
        if self.inputs.use_norm:
            np.savetxt(normfile, normval, fmt=b'%.4f', delimiter=' ')

        if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
            import matplotlib
            matplotlib.use(config.get("execution", "matplotlib_backend"))
            import matplotlib.pyplot as plt
            fig = plt.figure()
            if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
                plt.subplot(211)
            else:
                plt.subplot(311)
            self._plot_outliers_with_wave(gz, iidx, 'Intensity')
            if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
                plt.subplot(212)
                self._plot_outliers_with_wave(normval, np.union1d(tidx, ridx),
                                              'Norm (mm)')
            else:
                diff = ''
                if self.inputs.use_differences[0]:
                    diff = 'diff'
                plt.subplot(312)
                self._plot_outliers_with_wave(traval, tidx,
                                              'Translation (mm)' + diff)
                plt.subplot(313)
                self._plot_outliers_with_wave(rotval, ridx,
                                              'Rotation (rad)' + diff)
            plt.savefig(plotfile)
            plt.close(fig)

        motion_outliers = np.union1d(tidx, ridx)
        stats = [
            {
                'motion_file': motionfile,
                'functional_file': imgfile
            },
            {
                'common_outliers': len(np.intersect1d(iidx, motion_outliers)),
                'intensity_outliers': len(np.setdiff1d(iidx, motion_outliers)),
                'motion_outliers': len(np.setdiff1d(motion_outliers, iidx)),
            },
            {
                'motion': [
                    {
                        'using differences': self.inputs.use_differences[0]
                    },
                    {
                        'mean': np.mean(mc_in, axis=0).tolist(),
                        'min': np.min(mc_in, axis=0).tolist(),
                        'max': np.max(mc_in, axis=0).tolist(),
                        'std': np.std(mc_in, axis=0).tolist()
                    },
                ]
            },
            {
                'intensity': [
                    {
                        'using differences': self.inputs.use_differences[1]
                    },
                    {
                        'mean': np.mean(gz, axis=0).tolist(),
                        'min': np.min(gz, axis=0).tolist(),
                        'max': np.max(gz, axis=0).tolist(),
                        'std': np.std(gz, axis=0).tolist()
                    },
                ]
            },
        ]
        if self.inputs.use_norm:
            stats.insert(
                3, {
                    'motion_norm': {
                        'mean': np.mean(normval, axis=0).tolist(),
                        'min': np.min(normval, axis=0).tolist(),
                        'max': np.max(normval, axis=0).tolist(),
                        'std': np.std(normval, axis=0).tolist(),
                    }
                })
        save_json(statsfile, stats)
Example #30
# Local import
from get_data_light import DATA_DIR, get_second_level_dataset

# paths
input_image = path.join(DATA_DIR, 'spmT_0029.nii.gz')
mask_image = path.join(DATA_DIR, 'mask.nii.gz')
if (not path.exists(mask_image)) or (not path.exists(input_image)):
    get_second_level_dataset()

# write directory
write_dir = path.join(getcwd(), 'results')
if not path.exists(write_dir):
    mkdir(write_dir)

# read the data
mask = load(mask_image).get_data() > 0
ijk = np.array(np.where(mask)).T
nvox = ijk.shape[0]
data = load(input_image).get_data()[mask]
image_field = Field(nvox)
image_field.from_3d_grid(ijk, k=6)
image_field.set_field(data)
u, _ = image_field.ward(100)

# write the results
label_image = path.join(write_dir, 'label.nii')
wdata = mask.astype(np.int16) - 1  # background voxels get -1; boolean arrays do not support subtraction
wdata[mask] = u
save(Nifti1Image(wdata, load(mask_image).get_affine()), label_image)
print("Label image written in %s" % label_image)