Example #1
def test_nan():
    data = np.ones((9, 9, 9))
    data[0] = np.nan
    data[:, 0] = np.nan
    data[:, :, 0] = np.nan
    data[-1] = np.nan
    data[:, -1] = np.nan
    data[:, :, -1] = np.nan
    data[3:-3, 3:-3, 3:-3] = 10
    img = Nifti1Image(data, np.eye(4))
    masker = NiftiMasker(mask_args=dict(opening=0))
    masker.fit(img)
    mask = get_data(masker.mask_img_)
    assert mask[1:-1, 1:-1, 1:-1].all()
    assert not mask[0].any()
    assert not mask[:, 0].any()
    assert not mask[:, :, 0].any()
    assert not mask[-1].any()
    assert not mask[:, -1].any()
    assert not mask[:, :, -1].any()
Example #2
def test_4d_single_scan():
    mask = np.zeros((10, 10, 10))
    mask[3:7, 3:7, 3:7] = 1
    mask_img = Nifti1Image(mask, np.eye(4))

    # Test that a list of 4D images whose last dimension has size 1 is
    # treated the same as the corresponding 3D images

    rng = np.random.RandomState(42)
    data_5d = [rng.random_sample((10, 10, 10, 1)) for i in range(5)]
    data_4d = [d[..., 0] for d in data_5d]
    data_5d = [nibabel.Nifti1Image(d, np.eye(4)) for d in data_5d]
    data_4d = [nibabel.Nifti1Image(d, np.eye(4)) for d in data_4d]

    masker = NiftiMasker(mask_img=mask_img)
    masker.fit()
    data_trans_5d = masker.transform(data_5d)
    data_trans_4d = masker.transform(data_4d)

    assert_array_equal(data_trans_4d, data_trans_5d)
Example #3
def test_nan():
    data = np.ones((9, 9, 9))
    data[0] = np.nan
    data[:, 0] = np.nan
    data[:, :, 0] = np.nan
    data[-1] = np.nan
    data[:, -1] = np.nan
    data[:, :, -1] = np.nan
    data[3:-3, 3:-3, 3:-3] = 10
    img = Nifti1Image(data, np.eye(4))
    masker = NiftiMasker()
    masker.fit(img)
    mask = masker.mask_img_.get_data()
    assert_true(mask[1:-1, 1:-1, 1:-1].all())
    assert_false(mask[0].any())
    assert_false(mask[:, 0].any())
    assert_false(mask[:, :, 0].any())
    assert_false(mask[-1].any())
    assert_false(mask[:, -1].any())
    assert_false(mask[:, :, -1].any())
Example #4
def test_error_shape(random_state=42, shape=(3, 5, 7, 11)):
    # exercise the open-ended `if .. elif` error branches in masking.unmask

    rng = np.random.RandomState(random_state)

    # setup
    X = rng.randn()
    mask_img = np.zeros(shape, dtype=np.uint8)
    mask_img[rng.randn(*shape) > .4] = 1
    n_features = (mask_img > 0).sum()
    mask_img = Nifti1Image(mask_img, np.eye(4))
    n_samples = shape[0]

    X = rng.randn(n_samples, n_features, 2)
    # 3D X (unmask should raise a TypeError)
    assert_raises(TypeError, unmask, X, mask_img)

    X = rng.randn(n_samples, n_features)
    # Raises an error because the mask is 4D
    assert_raises(TypeError, unmask, X, mask_img)
Example #5
def test_auto_mask():
    # This is mostly a smoke test
    data = np.zeros((9, 9, 9))
    data[3:-3, 3:-3, 3:-3] = 10
    img = Nifti1Image(data, np.eye(4))
    masker = NiftiMasker()
    # Smoke test the fit
    masker.fit(img)
    # Smoke test the transform
    # With a 4D img
    masker.transform([
        img,
    ])
    # With a 3D img
    masker.transform(img)

    # check exception when transform() called without prior fit()
    masker2 = NiftiMasker(mask_img=img)
    with pytest.raises(ValueError, match='has not been fitted. '):
        masker2.transform(img)
Example #6
def generate_labeled_regions(shape, n_regions, rand_gen=None, labels=None,
                             affine=np.eye(4), dtype=int):
    """Generate a 3D volume with labeled regions.

    Parameters
    ==========
    shape: tuple
        shape of returned array

    n_regions: int
        number of regions to generate. By default (if "labels" is None),
        add a background with value zero.

    labels: iterable
        labels to use for each zone. If provided, n_regions is unused.

    rand_gen: numpy.random.RandomState
        random generator to use for generation.

    affine: numpy.ndarray
        affine of returned image

    Returns
    =======
    regions: nibabel.Nifti1Image
        data has shape "shape", containing region labels.
    """
    n_voxels = shape[0] * shape[1] * shape[2]
    if labels is None:
        labels = range(n_regions + 1)
        n_regions += 1
    else:
        n_regions = len(labels)

    regions = generate_regions_ts(n_voxels, n_regions, rand_gen=rand_gen)
    # replace weights with labels
    for n, row in zip(labels, regions):
        row[row > 0] = n
    data = np.zeros(shape, dtype=dtype)
    data[np.ones(shape, dtype=bool)] = regions.sum(axis=0).T
    return Nifti1Image(data, affine)
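A minimal usage sketch for the generator above, assuming numpy (as np) and the helper generate_regions_ts are importable in the same namespace:

label_img = generate_labeled_regions(shape=(9, 9, 9), n_regions=3,
                                     rand_gen=np.random.RandomState(0))
print(label_img.shape)                           # (9, 9, 9)
print(np.unique(np.asarray(label_img.dataobj)))  # 0 (background) plus the region labels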
Example #7
def mask_atlases(self, atlases_to_mask, tissue_for_masking, tissue_thr=.1):
    """Mask atlas(es) for the selected tissue type
           
    Parameters
    ----------
    atlases_dir : a string with the absolute path to the directoty where the atlas in single subject space are stored
    atlases_to_mask : a string or a list of strings with the name of the atlas(es) to be treated (with no extension)
    tissue_for_masking : a string in ["gm", "wm", "csf"]
    tissue_thr : threshold value for the tissue type - default at .1
    output_dir :  a string with the absolute path to the directory where the masked atlases should be written
    
    The function return number of subject * number of atlas nii file named [atlas]_masked_[subject]
    
    """
    if not(isdir(self.masked_atlas_dir)):
        mkdir(self.masked_atlas_dir)
    if type(atlases_to_mask) == str:
        atlases_to_mask = [atlases_to_mask]
    if tissue_for_masking == "gm":
        tissue_prefix = "p1"
    elif tissue_for_masking == "wm":
        tissue_prefix = "p2"
    elif tissue_for_masking =="csf":
        tissue_prefix = "p3"
    for atlas_name in atlases_to_mask:
        single_subject_atlases = sorted(glob("{}/{}*nii".format(self.atlas_dir, atlas_name)))
        for n, f in enumerate(single_subject_atlases):
            subject_name = Path(f).stem.split(atlas_name + "_")[1]
            print("working on subject {}, atlas {}, {}% treated".format(subject_name, atlas_name, round(((n+1)/((len(single_subject_atlases) * len(atlases_to_mask))))*100)))
            tissue_dir = str(Path(f).parents[1]) + "/mri"
            atlas = load_img(f)
            tissue_filename = check_for_multiple_match_ask_input("{}/{}{}*".format(tissue_dir, tissue_prefix, subject_name))
            if tissue_filename is None:
                continue
            tissue_image = load_img(tissue_filename)
            tissue_image = math_img("i > {}".format(str(tissue_thr)), i = tissue_image)
            tissue_array = tissue_image.get_data()
            atlas_array = atlas.get_data()
            masked_array = tissue_array * atlas_array
            masked_atlas = Nifti1Image(masked_array, tissue_image.affine, header = tissue_image.header)
            masked_atlas.to_filename("{}/{}_masked_{}.nii".format(self.masked_atlas_dir, atlas_name, subject_name))
Example #8
def resample_to_subject(tex_path, subject, side, output_path, verbose=False):
    """Resample a given texture from fsaverage to subject space
    
    Parameters
    ==========
    tex_path: string, path of the input texture
    subject: string, subject id in the freesurfer database
    side: string, one of ['left', 'right']
    output_path: string, path of the output texture
    verbose: boolean, the verbosity mode
    """
    # convert the input to .mgz format
    mgz = tex_path[:-4] + '.mgz'
    tex = load_texture(tex_path)[np.newaxis, np.newaxis].T
    mghformat.save(Nifti1Image(tex, np.eye(4)), mgz)
    fs_comment = subprocess.getoutput(
        'mri_surf2surf --trgsubject %s --trgsurfval %s --srcsubject ico --srcsurfval %s --hemi %sh --srcicoorder 7' % (
                subject, output_path, mgz, side[0]))
    if verbose:
        print(fs_comment)
    return output_path
Example #9
def test_4d_reports(mask):
    # Dummy 4D data
    data = np.zeros((10, 10, 10, 3), dtype=int)
    data[..., 0] = 1
    data[..., 1] = 2
    data[..., 2] = 3
    data_img_4d = Nifti1Image(data, np.eye(4))

    # test .fit method
    masker = NiftiMasker(mask_strategy='epi')
    masker.fit(data_img_4d)
    assert masker._report_content['warning_message'] is None
    html = masker.generate_report()
    _check_html(html)

    # test .fit_transform method
    masker = NiftiMasker(mask_img=mask, standardize=True)
    masker.fit_transform(data_img_4d)
    assert masker._report_content['warning_message'] is None
    html = masker.generate_report()
    _check_html(html)
Example #10
def check_and_equalize_affine(path_to_atlas, path_to_image_to_extract):
    """Check if two images have EXACTLY the same affine, if True than it returns the images as they are, in False
        it registers the atlas image onto the image to extract. The two images SHOULD BE already in alignment, 
        as this function is taught to correct for decimal differences in the affine that can create problems
        with the NiftiLabelMasker class
        
    Parameters
    ----------
    path to atlas : absolute path to the atlas image containing the ROIs from which values are to be extracted
    path_to_image_to_extract : absolute path to the image whose values are to be extracted. This image have to be in the same
    space as the atlas
    
    
    """
    atlas_img = load_img(path_to_atlas)
    image_to_extract = load_img(path_to_image_to_extract)
    if np.array_equal(atlas_img.affine, image_to_extract.affine):
        return (atlas_img, image_to_extract)
    else:
        atlas_img = Nifti1Image(atlas_img.get_data(), image_to_extract.affine, header = image_to_extract.header)
    return (atlas_img, image_to_extract)
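A short, hedged call sketch for the helper above (the paths are placeholders; the same module-level imports as the function are assumed):

atlas_img, image_to_extract = check_and_equalize_affine(
    "/path/to/atlas_in_subject_space.nii.gz",   # placeholder path
    "/path/to/map_to_extract.nii.gz")           # placeholder path
# after the call both images share exactly the same affine
assert np.array_equal(atlas_img.affine, image_to_extract.affine)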
Example #11
    def rescale(self, nbins=255, numproc=2, overwrite=True):
        """Method to convert the brain images from MRI signals to a given pixel value scale

        :param nbins: {int} integer representing the number of bins generated.
        :param numproc: {int} number of parallel processes applied to rescale.
        :param overwrite: {bool} whether the original image in the attribute :py:attr:`img` should be overwritten.
        :return:
        """
        print("Rescaling pixel intensity range...")
        try:
            p = Pool(numproc)  # number of parallel processes
            self.data = np.array(p.map(partial(_rescale, nbins, np.max(self.img.dataobj)), self.img.dataobj.T.astype(
                    'float64'))).T
        finally:
            p.close()
            p.join()
        if overwrite:
            self.img = Nifti1Image(self.data, self.img.affine)
            self.data = None
        self.img.uncache()
        print("\tRescaled!")
Example #12
def test_resample_img_segmentation_fault():
    if os.environ.get('APPVEYOR') == 'True':
        raise SkipTest('This test is too slow (7-8 minutes) on AppVeyor')

    # see https://github.com/nilearn/nilearn/issues/346
    shape_in = (64, 64, 64)
    aff_in = np.diag([2., 2., 2., 1.])
    aff_out = np.diag([3., 3., 3., 1.])
    # fourth_dim = 1024 works fine but for 1025 creates a segmentation
    # fault with scipy < 0.14.1
    fourth_dim = 1025

    try:
        data = np.ones(shape_in + (fourth_dim, ), dtype=np.float64)
    except MemoryError:
        # This can happen on AppVeyor and for 32-bit Python on Windows
        raise SkipTest('Not enough RAM to run this test')

    img_in = Nifti1Image(data, aff_in)

    resample_img(img_in, target_affine=aff_out, interpolation='nearest')
Example #13
def test_raises_bbox_error_if_data_outside_box():
    # Make some cases which should raise exceptions

    # original image
    data = np.zeros([8, 9, 10])
    affine = np.eye(4)
    affine_offset = np.array([1, 1, 1])
    affine[:3, 3] = affine_offset

    img = Nifti1Image(data, affine)

    # some axis flipping affines
    axis_flips = np.array(
        [np.diag(d) for d in [[-1, 1, 1, 1], [1, -1, 1, 1], [1, 1, -1, 1],
                              [-1, -1, 1, 1], [-1, 1, -1, 1], [1, -1, -1, 1]]])

    # some in-plane 90 degree rotations based on these
    # (by permuting two lines)
    af = axis_flips
    rotations = np.array([
        af[0][[1, 0, 2, 3]], af[0][[2, 1, 0, 3]], af[1][[1, 0, 2, 3]],
        af[1][[0, 2, 1, 3]], af[2][[2, 1, 0, 3]], af[2][[0, 2, 1, 3]]
    ])

    new_affines = np.concatenate([axis_flips, rotations])
    new_offset = np.array([0., 0., 0.])
    new_affines[:, :3, 3] = new_offset[np.newaxis, :]

    for new_affine in new_affines:
        exception = BoundingBoxError
        message = ("The field of view given "
                   "by the target affine does "
                   "not contain any of the data")
        function = lambda *args: resample_img(img, target_affine=new_affine)

        # Avoid sklearn backward compatibility issues
        # assert_raise_message(exception, message, function)

        with assert_raises(exception):
            function()
Example #14
    def _addImage(self,
                  img: nib.Nifti1Image,
                  path: str,
                  updateLayout: bool = True) -> None:
        """
        Replace the image in the dataset at the provided path, creating the path
        if it does not exist.

        Args:
            img: The image to add to the archive
            path: Relative path in archive at which to add image
            updateLayout: Update the underlying layout object upon conclusion of
                the image addition.
        """
        bids_write_to_file(path,
                           img.to_bytes(),
                           content_mode='binary',
                           root=self.rootPath,
                           conflicts='overwrite')

        if updateLayout:
            self._updateLayout()
Example #15
def apply_mask(niimgs, mask_img, dtype=np.float32,
               smoothing_fwhm=None, ensure_finite=True):
    """Extract signals from images using specified mask.

    Read the time series from the given nifti images or filepaths,
    using the mask.

    Parameters
    -----------
    niimgs: list of 4D nifti images
        Images to be masked. list of lists of 3D images are also accepted.

    mask_img: niimg
        3D mask array: True where a voxel should be used.

    smoothing_fwhm: float
        (optional) Gives the size of the spatial smoothing to apply to
        the signal, in voxels. Implies ensure_finite=True.

    ensure_finite: bool
        If ensure_finite is True (default), the non-finite values (NaNs and
        infs) found in the images will be replaced by zeros.

    Returns
    --------
    session_series: numpy.ndarray
        2D array of series with shape (image number, voxel number)

    Notes
    -----
    When using smoothing, ensure_finite is set to True, as non-finite
    values would spread across the image.
    """
    mask, mask_affine = _load_mask_img(mask_img)
    mask_img = Nifti1Image(_utils.as_ndarray(mask, dtype=np.int8),
                           mask_affine)
    return _apply_mask_fmri(niimgs, mask_img, dtype=dtype,
                            smoothing_fwhm=smoothing_fwhm,
                            ensure_finite=ensure_finite)
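A minimal sketch of calling the function above, assuming numpy (as np) and Nifti1Image are already imported:

rng = np.random.RandomState(0)
data_img = Nifti1Image(rng.random_sample((5, 5, 5, 10)), np.eye(4))
mask = np.zeros((5, 5, 5), dtype=np.int8)
mask[1:4, 1:4, 1:4] = 1
series = apply_mask(data_img, Nifti1Image(mask, np.eye(4)))
print(series.shape)   # (10, 27): one row per volume, one column per in-mask voxel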
Example #16
    def combine_brains(self, slices=None):
        """Method to combine all brains in the loaded filename dictionary into a big data object with the individual
        brain data in the 4th dimension.

        :param slices: {int} number of slices to load from each brain (testing purpose). The average from ±1 slice
            is loaded for every slice.
        :return: combined image of all files loaded into the attribute :py:attr:`img`
        """
        print("Loading %i files..." % len(self.names))
        self.img = load_img(self.filenames)
        if slices:
            step = int(float(self.img.shape[2]) / float(slices + 1))
            newshape = list(self.img.shape)
            newshape[2] = slices
            imgarr = np.empty(shape=tuple(newshape))
            for s, img in enumerate(self.img.dataobj.T):
                for i in range(1, slices + 1):
                    imgarr[..., i - 1, s] = np.mean(img.T[..., (i * step - 1):(i * step + 1)], axis=2)
            self.img = Nifti1Image(imgarr.reshape((self.img.shape[0], self.img.shape[1], slices, len(self.filenames))),
                                   self.img.affine)
        self.img.uncache()
        print("\tAll files loaded!")
Example #17
def test_fit():
    # Generate a single state with obvious structure
    n_time = 100
    tseries, _ = simu_tseries(n_time=n_time, n_roi=125, n_clusters=3, alpha=2)
    img = Nifti1Image(np.reshape(tseries, [5, 5, 5, n_time]), np.eye(4))

    # Generate a mask for the image
    mask = new_img_like(img, np.ones([5, 5, 5]), np.eye(4))

    # fit a dypac model
    n_replications = 10
    n_clusters = 3
    n_states = 3
    model = Dypac(n_clusters=n_clusters,
                  n_states=n_states,
                  n_replications=n_replications,
                  mask=mask)
    model.fit(img)
    print(model.components_.shape)

    # Check that the fitted model has the right number of components
    assert model.components_.shape[0] == n_states
Example #18
def test_compute_brain_mask():
    # Check masker for template masking strategy

    img = np.random.RandomState(42).uniform(size=(9, 9, 5))
    img = Nifti1Image(img, np.eye(4))

    masker = NiftiMasker(mask_strategy='template')

    masker.fit(img)
    mask1 = masker.mask_img_

    masker2 = NiftiMasker(mask_strategy='template',
                          mask_args=dict(threshold=0.))

    masker2.fit(img)
    mask2 = masker2.mask_img_

    mask_ref = np.zeros((9, 9, 5))
    np.testing.assert_array_equal(get_data(mask1), mask_ref)

    mask_ref[2:7, 2:7, 2] = 1
    np.testing.assert_array_equal(get_data(mask2), mask_ref)
Example #19
def test_iterator_generator():
    # Create a list of random images
    l = [
        Nifti1Image(np.random.random((10, 10, 10)), np.eye(4))
        for i in range(10)
    ]
    cc = _utils.concat_niimgs(l)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], l[0].get_data())

    # Same with iteration
    i = image.iter_img(l)
    cc = _utils.concat_niimgs(i)
    assert_equal(cc.shape[-1], 10)
    assert_array_almost_equal(cc.get_data()[..., 0], l[0].get_data())

    # Now, a generator
    b = []
    g = nifti_generator(b)
    cc = _utils.concat_niimgs(g)
    assert_equal(cc.shape[-1], 10)
    assert_equal(len(b), 10)
Example #20
def test_img_data_dtype():
    # Ignoring complex, binary, 128+ bit, RGBA
    nifti1_dtypes = (np.uint8, np.uint16, np.uint32, np.uint64, np.int8,
                     np.int16, np.int32, np.int64, np.float32, np.float64)
    dtype_matches = []
    with InTemporaryDirectory():
        for logical_dtype in nifti1_dtypes:
            dataobj = np.random.uniform(0, 255,
                                        size=(2, 2, 2)).astype(logical_dtype)
            for on_disk_dtype in nifti1_dtypes:
                img = Nifti1Image(dataobj, np.eye(4))
                img.set_data_dtype(on_disk_dtype)
                img.to_filename('test.nii')
                loaded = nb.load('test.nii')
                # To verify later that sometimes these differ meaningfully
                dtype_matches.append(
                    loaded.get_data_dtype() == niimg.img_data_dtype(loaded))
                assert (np.array(
                    loaded.dataobj).dtype == niimg.img_data_dtype(loaded))
    # Verify that the distinction is worth making
    assert any(dtype_matches)
    assert not all(dtype_matches)
Example #21
def test_first_level_with_scaling():
    shapes, rk = [(3, 1, 1, 2)], 1
    fmri_data = list()
    fmri_data.append(Nifti1Image(np.zeros((1, 1, 1, 2)) + 6, np.eye(4)))
    design_matrices = list()
    design_matrices.append(
        pd.DataFrame(np.ones((shapes[0][-1], rk)),
                     columns=list('abcdefghijklmnopqrstuvwxyz')[:rk]))
    fmri_glm = FirstLevelModel(mask_img=False,
                               noise_model='ols',
                               signal_scaling=0,
                               minimize_memory=True)
    assert fmri_glm.signal_scaling == 0
    assert not fmri_glm.standardize
    with pytest.warns(DeprecationWarning,
                      match="Deprecated. `scaling_axis` will be removed"):
        assert fmri_glm.scaling_axis == 0
    glm_parameters = fmri_glm.get_params()
    test_glm = FirstLevelModel(**glm_parameters)
    fmri_glm = fmri_glm.fit(fmri_data, design_matrices=design_matrices)
    test_glm = test_glm.fit(fmri_data, design_matrices=design_matrices)
    assert glm_parameters['signal_scaling'] == 0
Example #22
def test_compute_gray_matter_mask():
    # Check masker for template masking strategy

    img = np.random.rand(9, 9, 5)
    img = Nifti1Image(img, np.eye(4))

    masker = NiftiMasker(mask_strategy='template')

    masker.fit(img)
    mask1 = masker.mask_img_

    masker2 = NiftiMasker(mask_strategy='template',
                          mask_args=dict(threshold=0.))

    masker2.fit(img)
    mask2 = masker2.mask_img_

    mask_ref = np.zeros((9, 9, 5))
    mask_ref[2:7, 2:7, 2] = 1

    np.testing.assert_array_equal(mask1.get_data(), mask_ref)
    np.testing.assert_array_equal(mask2.get_data(), mask_ref)
Example #23
def test_get_cut_slices():

    # Generate simple simulated data with one "spot"
    img, data = _simulate_img()

    # Use automatic selection of coordinates
    cut_slices = bp._get_cut_slices(img, cut_coords=None, threshold=None)
    assert (cut_slices == [4, 4, 4]).all()

    # Check that using a single number for cut_coords raises an error
    with pytest.raises(ValueError):
        bp._get_cut_slices(img, cut_coords=4, threshold=None)

    # Check that it is possible to manually specify coordinates
    cut_slices = bp._get_cut_slices(img, cut_coords=[2, 2, 2], threshold=None)
    assert (cut_slices == [2, 2, 2]).all()

    # Check that the affine does not change where the cut is done
    affine = 2 * np.eye(4)
    img = Nifti1Image(data, affine)
    cut_slices = bp._get_cut_slices(img, cut_coords=None, threshold=None)
    assert (cut_slices == [4, 4, 4]).all()
Example #24
def run_preprocessing(image_file, output_file):
    # Load and crop
    img = check_niimg(image_file, ensure_ndim=4)
    img_data = img.get_data()
    img_data = img_data[:, :, :, 17:]  # Initial scanner setup
    img_data = img_data[:, :, :, 27:]  # Starting cartoon
    img_data = img_data[:, :, :, :975]
    print('Data matrix shape: ' + str(img_data.shape))

    img_data = np.reshape(img_data, (245245, img_data.shape[3]))

    # Normalize data
    Y = zscore(img_data).T

    # Detrend data
    Y = detrend_data(Y)
    print('Detrended data matrix: ' + str(Y.shape))

    Y = np.reshape(Y.T, (65, 77, 49, 975))

    r_squared_img = Nifti1Image(Y, affine=img.affine)
    r_squared_img.to_filename(output_file)
Example #25
def savenifti(arr, filename, niftiobj=None, affine=None, header=None):
    '''
    savenifti(arr, filename, niftiobj=None, affine=None, header=None):

    save a 3D or 4D <arr> into a full nifti file given by <filename>. We use
    the affine and header information from <niftiobj>.

    Note that the file name can have either the '.nii' or '.nii.gz' extension; nibabel
    can take care of it.

    If <affine> or <header> is supplied, they will overwrite information from the <niftiobj>

    20180622 RZ add support for affine and header

    '''
    from nibabel import save, Nifti1Image
    from numpy import ndarray
    from RZutilpy.system import makedirs, Path

    # check input
    assert isinstance(
        arr,
        ndarray) and (3 <= arr.ndim <= 4), 'Please input a 3d or 4d ndarray!'

    # make the dir if it does not exist
    filename = Path(filename) if not isinstance(filename, Path) else filename
    makedirs(filename)  # make sure the output directory exists

    if affine is None or header is None:
        assert isinstance(
            niftiobj,
            Nifti1Image), 'affine or header is none, you must supply niftiobj'

    # get affine and header information
    affine = niftiobj.affine.copy() if affine is None else affine.copy()
    header = niftiobj.header.copy() if header is None else header.copy()

    save(Nifti1Image(arr, affine, header), filename.str)
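A hedged usage sketch; the reference path below is a placeholder image whose affine and header are reused:

import numpy as np
import nibabel as nib
ref = nib.load('/path/to/reference.nii.gz')        # placeholder reference image
vol = np.zeros(ref.shape[:3], dtype=np.float32)    # array to write out
savenifti(vol, '/path/to/output.nii.gz', niftiobj=ref)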
Example #26
def generate_warping_field(fname_dest,
                           warp_x,
                           warp_y,
                           fname_warp='warping_field.nii.gz',
                           verbose=1):
    """
    Generate an ITK warping field
    :param fname_dest:
    :param warp_x:
    :param warp_y:
    :param fname_warp:
    :param verbose:
    :return:
    """
    sct.printv('\nGenerate warping field...', verbose)

    # Get image dimensions
    # sct.printv('Get destination dimension', verbose)
    nx, ny, nz, nt, px, py, pz, pt = Image(fname_dest).dim
    # sct.printv('  matrix size: '+str(nx)+' x '+str(ny)+' x '+str(nz), verbose)
    # sct.printv('  voxel size:  '+str(px)+'mm x '+str(py)+'mm x '+str(pz)+'mm', verbose)

    # initialize
    data_warp = np.zeros((nx, ny, nz, 1, 3))

    # fill matrix
    data_warp[:, :, :, 0, 0] = -warp_x  # need to invert due to ITK conventions
    data_warp[:, :, :, 0, 1] = -warp_y  # need to invert due to ITK conventions

    # save warping field
    im_dest = load(fname_dest)
    hdr_dest = im_dest.get_header()
    hdr_warp = hdr_dest.copy()
    hdr_warp.set_intent('vector', (), '')
    hdr_warp.set_data_dtype('float32')
    img = Nifti1Image(data_warp, None, hdr_warp)
    save(img, fname_warp)
    sct.printv(' --> ' + fname_warp, verbose)
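A hedged call sketch; the destination path is a placeholder, and the warp arrays must match the grid dimensions of that image:

nx, ny, nz = 64, 64, 32            # assumed grid of the destination image
warp_x = np.zeros((nx, ny, nz))    # x displacement component
warp_y = np.zeros((nx, ny, nz))    # y displacement component
generate_warping_field('/path/to/dest.nii.gz', warp_x, warp_y,
                       fname_warp='warping_field.nii.gz')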
Example #27
    def save(self, type=''):
        """
        Write an image in a nifti file
        :param type:    if not set, the image is saved in the same type as input data
                        if 'minimize', image space is minimize
                        (2, 'uint8', np.uint8, "NIFTI_TYPE_UINT8"),
                        (4, 'int16', np.int16, "NIFTI_TYPE_INT16"),
                        (8, 'int32', np.int32, "NIFTI_TYPE_INT32"),
                        (16, 'float32', np.float32, "NIFTI_TYPE_FLOAT32"),
                        (32, 'complex64', np.complex64, "NIFTI_TYPE_COMPLEX64"),
                        (64, 'float64', np.float64, "NIFTI_TYPE_FLOAT64"),
                        (256, 'int8', np.int8, "NIFTI_TYPE_INT8"),
                        (512, 'uint16', np.uint16, "NIFTI_TYPE_UINT16"),
                        (768, 'uint32', np.uint32, "NIFTI_TYPE_UINT32"),
                        (1024,'int64', np.int64, "NIFTI_TYPE_INT64"),
                        (1280, 'uint64', np.uint64, "NIFTI_TYPE_UINT64"),
                        (1536, 'float128', _float128t, "NIFTI_TYPE_FLOAT128"),
                        (1792, 'complex128', np.complex128, "NIFTI_TYPE_COMPLEX128"),
                        (2048, 'complex256', _complex256t, "NIFTI_TYPE_COMPLEX256"),
        """
        from nibabel import Nifti1Image, save
        from sct_utils import printv
        if type != '':
            self.changeType(type)
        if self.hdr:
            self.hdr.set_data_shape(self.data.shape)
        img = Nifti1Image(self.data, None, self.hdr)
        #printv('saving ' + self.path + self.file_name + self.ext + '\n', self.verbose)

        from os import path, remove
        fname_out = self.path + self.file_name + self.ext
        if path.isfile(fname_out):
            printv(
                'WARNING: File ' + fname_out + ' already exists. Deleting it.',
                1, 'warning')
            remove(fname_out)
        # save file
        save(img, fname_out)
Example #28
def unmask(X, mask_img, order="F"):
    """Take masked data and bring them back into 3D/4D

    This function can be applied to a list of masked data.

    Parameters
    ==========
    X: numpy.ndarray (or list of)
        Masked data. shape: (samples #, features #).
        If X is one-dimensional, it is assumed that samples# == 1.
    mask_img: nifti-like image
        Mask. Must be 3-dimensional.

    Returns
    =======
    data: nifti-like image (or list of)
        Unmasked data. Depending on the shape of X, data can have
        different shapes:

        - X.ndim == 2:
          Shape: (mask.shape[0], mask.shape[1], mask.shape[2], X.shape[0])
        - X.ndim == 1:
          Shape: (mask.shape[0], mask.shape[1], mask.shape[2])
    """

    if isinstance(X, list):
        ret = []
        for x in X:
            ret.append(unmask(x, mask_img, order=order))  # 1-level recursion
        return ret

    mask, affine = _load_mask_img(mask_img)

    if X.ndim == 2:
        unmasked = _unmask_nd(X, mask, order=order)
    elif X.ndim == 1:
        unmasked = _unmask_3d(X, mask, order=order)
    return Nifti1Image(unmasked, affine)
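A small usage sketch for unmask above, assuming numpy (as np), Nifti1Image and the surrounding nilearn helpers (_load_mask_img, _unmask_3d, _unmask_nd) are importable:

mask = np.zeros((4, 4, 4), dtype=np.int8)
mask[1:3, 1:3, 1:3] = 1                  # 8 voxels inside the mask
mask_img = Nifti1Image(mask, np.eye(4))
img_3d = unmask(np.arange(8, dtype=float), mask_img)                 # 1D X -> 3D image
img_4d = unmask(np.arange(16, dtype=float).reshape(2, 8), mask_img)  # 2D X -> 4D image
print(img_4d.shape)   # (4, 4, 4, 2)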
Example #29
def test_3x3_affine_bbox():
    # Test that the bounding-box is properly computed when
    # transforming with a negative affine component
    # This is specifically to test for a change in behavior between
    # scipy < 0.18 and scipy >= 0.18, which is an interaction between
    # offset and a diagonal affine
    image = np.ones((20, 30))
    source_affine = np.eye(4)
    # Give the affine an offset
    source_affine[:2, 3] = np.array([96, 64])

    # We need to turn this data into a nibabel image
    img = Nifti1Image(image[:, :, np.newaxis], affine=source_affine)

    target_affine_3x3 = np.eye(3) * 2
    # One negative axis
    target_affine_3x3[1] *= -1

    img_3d_affine = resample_img(img, target_affine=target_affine_3x3)

    # If the bounding box is computed wrong, the image will be only
    # zeros
    np.testing.assert_allclose(img_3d_affine.get_data().max(), image.max())
Example #30
def test_iterator_generator():
    # Create a list of random images
    rng = np.random.RandomState(42)
    list_images = [
        Nifti1Image(rng.random_sample((10, 10, 10)), np.eye(4))
        for i in range(10)
    ]
    cc = _utils.concat_niimgs(list_images)
    assert cc.shape[-1] == 10
    assert_array_almost_equal(get_data(cc)[..., 0], get_data(list_images[0]))

    # Same with iteration
    i = image.iter_img(list_images)
    cc = _utils.concat_niimgs(i)
    assert cc.shape[-1] == 10
    assert_array_almost_equal(get_data(cc)[..., 0], get_data(list_images[0]))

    # Now, a generator
    b = []
    g = nifti_generator(b)
    cc = _utils.concat_niimgs(g)
    assert cc.shape[-1] == 10
    assert len(b) == 10
Example #31
def _interpolate_data(stc, morph, mri_resolution=True, mri_space=True,
                      output='nifti1'):
    """Interpolate source estimate data to MRI."""
    _check_dep(nibabel='2.1.0', dipy=False)
    if output not in ('nifti', 'nifti1', 'nifti2'):
        raise ValueError("invalid output specifier %s. Must be 'nifti1' or"
                         " 'nifti2'" % output)
    if output in ('nifti', 'nifti1'):
        from nibabel import (Nifti1Image as NiftiImage,
                             Nifti1Header as NiftiHeader)
    else:
        assert output == 'nifti2'
        from nibabel import (Nifti2Image as NiftiImage,
                             Nifti2Header as NiftiHeader)
    assert morph.kind == 'volume'

    voxel_size_defined = False

    if isinstance(mri_resolution, (int, float)) and not isinstance(
            mri_resolution, bool):
        # use iso voxel size
        mri_resolution = (float(mri_resolution),) * 3

    if isinstance(mri_resolution, tuple):
        _check_dep(nibabel=False, dipy='0.10.1')  # nibabel was already checked
        from dipy.align.reslice import reslice

        voxel_size = mri_resolution
        voxel_size_defined = True
        mri_resolution = True

    # if data wasn't morphed yet - necessary for call of
    # stc_unmorphed.as_volume. Since only the shape of src is known, it cannot
    # be resliced to a given voxel size without knowing the original.
    if isinstance(morph, SourceSpaces):
        assert morph.kind == 'volume'
        if voxel_size_defined:
            raise ValueError(
                "Cannot infer original voxel size for reslicing... "
                "set mri_resolution to boolean value or apply morph first.")
        from mne.io.constants import BunchConst
        morph = BunchConst(src_data=_get_src_data(morph)[0])

    # setup volume parameters
    n_times = stc.data.shape[1]
    shape3d = morph.src_data['src_shape']
    shape = (n_times,) + shape3d
    vols = np.zeros(shape)

    mask3d = morph.src_data['inuse'].reshape(shape3d).astype(bool)
    n_vertices = np.sum(mask3d)

    n_vertices_seen = 0
    for k, vol in enumerate(vols):  # loop over time instants
        stc_slice = slice(n_vertices_seen, n_vertices_seen + n_vertices)
        vol[mask3d] = stc.data[stc_slice, k]

    n_vertices_seen += n_vertices

    # use mri resolution as represented in src
    if mri_resolution:
        mri_shape3d = morph.src_data['src_shape_full']
        mri_shape = (n_times,) + mri_shape3d
        mri_vol = np.zeros(mri_shape)

        interpolator = morph.src_data['interpolator']

        for k, vol in enumerate(vols):
            mri_vol[k] = (interpolator * vol.ravel()).reshape(mri_shape3d)
        vols = mri_vol

    vols = vols.T

    # set correct space
    affine = morph.src_data['src_affine_vox']

    if not mri_resolution:
        affine = morph.src_data['src_affine_src']

    if mri_space:
        affine = np.dot(morph.src_data['src_affine_ras'], affine)

    affine[:3] *= 1e3

    # pre-define header
    header = NiftiHeader()
    header.set_xyzt_units('mm', 'msec')
    header['pixdim'][4] = 1e3 * stc.tstep

    with warnings.catch_warnings():  # nibabel<->numpy warning
        img = NiftiImage(vols, affine, header=header)

    # if a specific voxel size was targeted (only possible after morphing)
    if voxel_size_defined:
        # reslice mri
        img, img_affine = reslice(
            img.get_data(), img.affine, _get_zooms_orig(morph), voxel_size)
        with warnings.catch_warnings():  # nibabel<->numpy warning
            img = NiftiImage(img, img_affine, header=header)

    return img
Example #32
def _morphed_stc_as_volume(morph, stc, mri_resolution=False, mri_space=True,
                           output='nifti1'):
    """Return volume source space as Nifti1Image and/or save to disk."""
    if not isinstance(stc, VolSourceEstimate):
        raise ValueError('Only volume source estimates can be converted to '
                         'volumes')
    _check_dep(nibabel='2.1.0', dipy=False)

    known_types = ('nifti', 'nifti1', 'nifti2')
    if output not in known_types:
        raise ValueError('output must be one of %s, got %s'
                         % (known_types, output))
    if output in ('nifti', 'nifti1'):
        from nibabel import (Nifti1Image as NiftiImage,
                             Nifti1Header as NiftiHeader)
    else:
        assert output == 'nifti2'
        from nibabel import (Nifti2Image as NiftiImage,
                             Nifti2Header as NiftiHeader)

    new_zooms = None

    # if full MRI resolution, compute zooms from shape and MRI zooms
    if isinstance(mri_resolution, bool) and mri_resolution:
        new_zooms = _get_zooms_orig(morph)

    # if MRI resolution is set manually as a single value, convert to tuple
    if isinstance(mri_resolution, (int, float)) and not isinstance(
            mri_resolution, bool):
        # use iso voxel size
        new_zooms = (float(mri_resolution),) * 3

    # if MRI resolution is set manually as a tuple, use it
    if isinstance(mri_resolution, tuple):
        new_zooms = mri_resolution

    # create header
    hdr = NiftiHeader()
    hdr.set_xyzt_units('mm', 'msec')
    hdr['pixdim'][4] = 1e3 * stc.tstep

    # setup empty volume
    img = np.zeros(morph.shape + (stc.shape[1],)).reshape(-1, stc.shape[1])
    img[stc.vertices, :] = stc.data

    img = img.reshape(morph.shape + (-1,))

    # make nifti from data
    with warnings.catch_warnings():  # nibabel<->numpy warning
        img = NiftiImage(img, morph.affine, header=hdr)

    # reslice in case of manually defined voxel size
    zooms = morph.zooms[:3]
    if new_zooms is not None:
        from dipy.align.reslice import reslice
        new_zooms = new_zooms[:3]
        img, affine = reslice(img.get_data(),
                              img.affine,  # MRI to world registration
                              zooms,  # old voxel size in mm
                              new_zooms)  # new voxel size in mm
        with warnings.catch_warnings():  # nibabel<->numpy warning
            img = NiftiImage(img, affine)
        zooms = new_zooms

    #  set zooms in header
    img.header.set_zooms(tuple(zooms) + (1,))
    return img
Example #33
def ReadNifti(filename):
    nft = squeeze_image(Nifti1Image.from_filename(filename))
    return nft
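Typical call, with a placeholder file name:

img = ReadNifti('/path/to/volume.nii.gz')   # squeezed nibabel Nifti1Image
print(img.shape)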