def test_different_affines():
    # Mask and EPI images with different affines
    mask_img = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                           affine=np.diag((4, 4, 4, 1)))
    epi_img1 = Nifti1Image(np.ones((4, 4, 4, 3)), affine=np.diag((2, 2, 2, 1)))
    epi_img2 = Nifti1Image(np.ones((3, 3, 3, 3)), affine=np.diag((3, 3, 3, 1)))
    masker = MultiNiftiMasker(mask_img=mask_img)
    epis = masker.fit_transform([epi_img1, epi_img2])
    for this_epi in epis:
        masker.inverse_transform(this_epi)
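# A minimal usage sketch (assuming nilearn and nibabel are installed): images
# whose affine differs from the mask are resampled onto the mask grid, so each
# transformed array has one column per mask voxel.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import MultiNiftiMasker

sketch_mask = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                          affine=np.diag((4, 4, 4, 1)))
sketch_epi = Nifti1Image(np.ones((4, 4, 4, 3)), affine=np.diag((2, 2, 2, 1)))
sketch_masker = MultiNiftiMasker(mask_img=sketch_mask)
sketch_signals = sketch_masker.fit_transform([sketch_epi])
print(sketch_signals[0].shape)  # expected (3, 8): 3 time points, 8 mask voxels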
def test_dtype():
    data = np.zeros((9, 9, 9), dtype=np.float64)
    data[2:-2, 2:-2, 2:-2] = 10
    img = Nifti1Image(data, np.eye(4))

    masker = MultiNiftiMasker(dtype='auto')
    masker.fit([[img]])

    masked_img = masker.transform([[img]])
    assert (masked_img[0].dtype == np.float32)
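# Sketch of the `dtype` parameter (behaviour as documented by nilearn): 'auto'
# casts continuous data to float32 to save memory, while an explicit numpy
# dtype keeps the requested precision. `masker_f64` is a hypothetical
# configuration, not used by the test above.
masker_f64 = MultiNiftiMasker(dtype=np.float64)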
def test_shelving():

    mask_img = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                           affine=np.diag((4, 4, 4, 1)))
    epi_img1 = Nifti1Image(np.ones((2, 2, 2)), affine=np.diag((4, 4, 4, 1)))
    epi_img2 = Nifti1Image(np.ones((2, 2, 2)), affine=np.diag((2, 2, 2, 1)))
    cachedir = mkdtemp()
    try:
        masker_shelved = MultiNiftiMasker(mask_img=mask_img,
                                          memory=Memory(location=cachedir,
                                                        mmap_mode='r',
                                                        verbose=0))
        masker_shelved._shelving = True
        masker = MultiNiftiMasker(mask_img=mask_img)
        epis_shelved = masker_shelved.fit_transform([epi_img1, epi_img2])
        epis = masker.fit_transform([epi_img1, epi_img2])
        for epi_shelved, epi in zip(epis_shelved, epis):
            epi_shelved = epi_shelved.get()
            assert_array_equal(epi_shelved, epi)

        epi = masker.fit_transform(epi_img1)
        epi_shelved = masker_shelved.fit_transform(epi_img1)
        epi_shelved = epi_shelved.get()
        assert_array_equal(epi_shelved, epi)
    finally:
        # allows the cache directory to be deleted on Windows
        del masker
        shutil.rmtree(cachedir, ignore_errors=True)
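# Sketch of plain on-disk caching (assuming a writable temporary directory):
# passing a joblib.Memory via `memory`, together with `memory_level`, is meant
# to cache expensive steps such as mask computation and resampling.
from tempfile import mkdtemp
from joblib import Memory

cache_mask = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                         affine=np.diag((4, 4, 4, 1)))
cached_masker = MultiNiftiMasker(mask_img=cache_mask,
                                 memory=Memory(location=mkdtemp(), verbose=0),
                                 memory_level=2)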
def test_joblib_cache():
    from joblib import hash
    # Dummy mask
    mask = np.zeros((40, 40, 40))
    mask[20, 20, 20] = 1
    mask_img = Nifti1Image(mask, np.eye(4))

    with write_tmp_imgs(mask_img, create_files=True) as filename:
        masker = MultiNiftiMasker(mask_img=filename)
        masker.fit()
        mask_hash = hash(masker.mask_img_)
        get_data(masker.mask_img_)
        assert mask_hash == hash(masker.mask_img_)
        # allows "filename" to be deleted on Windows
        del masker
Example #5
def test_masker_attributes_with_fit():
    # Test base-class masker attributes on the sub-class
    data, mask_img, components, rng = _make_canica_test_data(n_subjects=3)
    # Passing mask_img
    canica = CanICA(n_components=3, mask=mask_img, random_state=0)
    canica.fit(data)
    assert canica.mask_img_ == mask_img
    assert canica.mask_img_ == canica.masker_.mask_img_
    # Passing masker
    masker = MultiNiftiMasker(mask_img=mask_img)
    canica = CanICA(n_components=3, mask=masker, random_state=0)
    canica.fit(data)
    assert canica.mask_img_ == canica.masker_.mask_img_
    canica = CanICA(mask=mask_img, n_components=3)
    with pytest.raises(ValueError,
                       match="Object has no components_ attribute. "
                       "This is probably because fit has not been called"):
        canica.transform(data)
    # Test that an error is raised when an empty list is provided.
    with pytest.raises(ValueError,
                       match='Need one or more Niimg-like objects as input, '
                       'an empty list was given.'):
        canica.fit([])
    # Test passing masker arguments to estimator
    canica = CanICA(n_components=3,
                    target_affine=np.eye(4),
                    target_shape=(6, 8, 10),
                    mask_strategy='background')
    canica.fit(data)
def test_compute_multi_gray_matter_mask(strategy):
    imgs = _get_random_imgs((9, 9, 5), 2)
    masker = MultiNiftiMasker(mask_strategy=strategy, mask_args={'opening': 1})
    masker.fit(imgs)
    # Check that the order of the images does not change the output
    masker2 = MultiNiftiMasker(mask_strategy=strategy,
                               mask_args={'opening': 1})
    masker2.fit(imgs[::-1])
    mask_ref = np.zeros((9, 9, 5), dtype='int8')
    np.testing.assert_array_equal(get_data(masker.mask_img_), mask_ref)
    np.testing.assert_array_equal(get_data(masker2.mask_img_), mask_ref)
def test_mask_strategy_errors():
    # Error with unknown mask_strategy
    imgs = _get_random_imgs((9, 9, 5), 2)
    mask = MultiNiftiMasker(mask_strategy='foo')
    with pytest.raises(ValueError,
                       match="Unknown value of mask_strategy 'foo'"):
        mask.fit(imgs)
    # Warning with deprecated 'template' strategy
    mask = MultiNiftiMasker(mask_strategy='template')
    with pytest.warns(UserWarning,
                      match="Masking strategy 'template' is deprecated."):
        mask.fit(imgs)
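# Sketch of documented `mask_strategy` values (the exact set depends on the
# nilearn version): 'background' and 'epi' are the long-standing options, and
# 'template' is deprecated in favour of explicit template-based names.
masker_background = MultiNiftiMasker(mask_strategy='background')
masker_epi = MultiNiftiMasker(mask_strategy='epi')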
Example #8
def test_mask_reducer():
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    imgs = []
    for i in range(8):
        this_img = rng.normal(size=shape)
        # Create a fake activation to get a non-empty mask
        this_img[2:4, 2:4, 2:4, :] += 10
        imgs.append(nibabel.Nifti1Image(this_img, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img=mask_img).fit()

    # Test fit on multiple images
    data = mask_and_reduce(masker, imgs)
    assert data.shape == (8 * 5, 6 * 8 * 10)

    data = mask_and_reduce(masker, imgs, n_components=3)
    assert data.shape == (8 * 3, 6 * 8 * 10)

    data = mask_and_reduce(masker, imgs, reduction_ratio=0.4)
    assert data.shape == (8 * 2, 6 * 8 * 10)

    # Test on single image
    data_single = mask_and_reduce(masker, imgs[0], n_components=3)
    assert data_single.shape == (3, 6 * 8 * 10)

    # Test n_jobs > 1
    data = mask_and_reduce(masker,
                           imgs[0],
                           n_components=3,
                           n_jobs=2,
                           random_state=0)
    assert data.shape == (3, 6 * 8 * 10)
    assert_array_almost_equal(data_single, data)

    # Test that reduced data is orthogonal
    data = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    assert data.shape == (3, 6 * 8 * 10)
    cov = data.dot(data.T)
    cov_diag = np.zeros((3, 3))
    for i in range(3):
        cov_diag[i, i] = cov[i, i]
    assert_array_almost_equal(cov, cov_diag)

    # Test reproducibility
    data1 = mask_and_reduce(masker, imgs[0], n_components=3, random_state=0)
    data2 = mask_and_reduce(masker, [imgs[0]] * 2,
                            n_components=3,
                            random_state=0)
    assert_array_almost_equal(np.tile(data1, (2, 1)), data2)
def test_nan():
    data = np.ones((9, 9, 9))
    data[0] = np.nan
    data[:, 0] = np.nan
    data[:, :, 0] = np.nan
    data[-1] = np.nan
    data[:, -1] = np.nan
    data[:, :, -1] = np.nan
    data[3:-3, 3:-3, 3:-3] = 10
    img = Nifti1Image(data, np.eye(4))
    masker = MultiNiftiMasker(mask_args=dict(opening=0))
    masker.fit([img])
    mask = get_data(masker.mask_img_)
    assert mask[1:-1, 1:-1, 1:-1].all()
    assert not mask[0].any()
    assert not mask[:, 0].any()
    assert not mask[:, :, 0].any()
    assert not mask[-1].any()
    assert not mask[:, -1].any()
    assert not mask[:, :, -1].any()
def test_standardization():
    rng = np.random.RandomState(42)
    data_shape = (9, 9, 5)
    n_samples = 500

    signals = rng.standard_normal(size=(2, np.prod(data_shape), n_samples))
    means = rng.standard_normal(size=(2, np.prod(data_shape), 1)) * 50 + 1000
    signals += means

    img1 = Nifti1Image(signals[0].reshape(data_shape + (n_samples, )),
                       np.eye(4))
    img2 = Nifti1Image(signals[1].reshape(data_shape + (n_samples, )),
                       np.eye(4))

    mask = Nifti1Image(np.ones(data_shape), np.eye(4))

    # z-score
    masker = MultiNiftiMasker(mask, standardize='zscore')
    trans_signals = masker.fit_transform([img1, img2])

    for ts in trans_signals:
        np.testing.assert_almost_equal(ts.mean(0), 0)
        np.testing.assert_almost_equal(ts.std(0), 1)

    # psc
    masker = MultiNiftiMasker(mask, standardize='psc')
    trans_signals = masker.fit_transform([img1, img2])

    for ts, s in zip(trans_signals, signals):
        np.testing.assert_almost_equal(ts.mean(0), 0)
        np.testing.assert_almost_equal(
            ts, (s / s.mean(1)[:, np.newaxis] * 100 - 100).T)
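# Arithmetic sketch of the 'psc' standardization checked above: each voxel's
# time course is rescaled to its percent deviation from its own mean.
example_ts = np.array([98., 100., 102.])
print(example_ts / example_ts.mean() * 100 - 100)  # -> [-2.  0.  2.]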
def test_3d_images():
    # Test that the MultiNiftiMasker works with 3D images
    mask_img = Nifti1Image(np.ones((2, 2, 2), dtype=np.int8),
                           affine=np.diag((4, 4, 4, 1)))
    epi_img1 = Nifti1Image(np.ones((2, 2, 2)), affine=np.diag((4, 4, 4, 1)))
    epi_img2 = Nifti1Image(np.ones((2, 2, 2)), affine=np.diag((2, 2, 2, 1)))
    masker = MultiNiftiMasker(mask_img=mask_img)
    epis = masker.fit_transform([epi_img1, epi_img2])
    # This is mostly a smoke test
    assert len(epis) == 2

    # verify that 4D mask arguments are refused
    mask_img_4d = Nifti1Image(np.ones((2, 2, 2, 2), dtype=np.int8),
                              affine=np.diag((4, 4, 4, 1)))
    masker2 = MultiNiftiMasker(mask_img=mask_img_4d)
    with pytest.raises(DimensionError,
                       match="Input data has incompatible dimensionality: "
                       "Expected dimension is 3D and you provided "
                       "a 4D image."):
        masker2.fit()
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = _check_embedded_nifti_masker(owner)
    assert type(masker) is MultiNiftiMasker

    for mask, multi_subject in ((MultiNiftiMasker(), True),
                                (NiftiMasker(), False)):
        owner = OwningClass(mask=mask)
        masker = _check_embedded_nifti_masker(owner,
                                              multi_subject=multi_subject)
        assert type(masker) == type(mask)
        for param_key in masker.get_params():
            if param_key not in [
                    'memory', 'memory_level', 'n_jobs', 'verbose'
            ]:
                assert (getattr(masker, param_key) == getattr(mask, param_key))
            else:
                assert (getattr(masker,
                                param_key) == getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = _check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = _check_embedded_nifti_masker(owner)
    assert masker.mask_img is mask.mask_img_

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    with pytest.warns(UserWarning):
        _check_embedded_nifti_masker(owner)
# We only use the training data of this study,
# where random binary images were shown.

# training data starts after the first 12 files
fmri_random_runs_filenames = dataset.func[12:]
stimuli_random_runs_filenames = dataset.label[12:]

##############################################################################
# We can use :class:`nilearn.maskers.MultiNiftiMasker` to load, clean and
# mask the fMRI data.

import numpy as np
from nilearn.maskers import MultiNiftiMasker

masker = MultiNiftiMasker(mask_img=dataset.mask,
                          detrend=True,
                          standardize=True)
masker.fit()
fmri_data = masker.transform(fmri_random_runs_filenames)

# shape of the binary (i.e. black and white values) image in pixels
stimulus_shape = (10, 10)

# We load the visual stimuli from csv files
stimuli = []
for stimulus_run in stimuli_random_runs_filenames:
    stimuli.append(
        np.reshape(np.loadtxt(stimulus_run, dtype=int, delimiter=','),
                   (-1, ) + stimulus_shape,
                   order='F'))
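##############################################################################
# Quick sanity check (a sketch; exact shapes depend on the downloaded data):
# the masker returns one (n_scans, n_voxels) array per run, and each stimulus
# run is reshaped to (n_scans, 10, 10).
for run_fmri, run_stimuli in zip(fmri_data, stimuli):
    print(run_fmri.shape, run_stimuli.shape)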
Example #14
y_shape = (10, 10)

sys.stderr.write(" Done (%.2fs).\n" % (time.time() - t0))

############################################################################
# Then we prepare and mask the data
# ----------------------------------
import numpy as np
from nilearn.maskers import MultiNiftiMasker

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data
masker = MultiNiftiMasker(mask_img=miyawaki_dataset.mask,
                          detrend=True,
                          standardize=False)
masker.fit()
X_train = masker.transform(X_random_filenames)
X_test = masker.transform(X_figure_filenames)

# We load the visual stimuli from csv files
y_train = []
for y in y_random_filenames:
    y_train.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','), (-1, ) + y_shape,
                   order='F'))

y_test = []
for y in y_figure_filenames:
    y_test.append(
        np.reshape(np.loadtxt(y, dtype=int, delimiter=','), (-1, ) + y_shape,
                   order='F'))
Example #15
def test_multi_pca():
    # Smoke test the MultiPCA
    # XXX: this is mostly a smoke test
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    rng = np.random.RandomState(0)

    # Create a "multi-subject" dataset
    data = []
    for i in range(8):
        this_data = rng.normal(size=shape)
        # Create a fake activation to get a non-empty mask
        this_data[2:4, 2:4, 2:4, :] += 10
        data.append(nibabel.Nifti1Image(this_data, affine))

    mask_img = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    multi_pca = MultiPCA(mask=mask_img, n_components=3,
                         random_state=0)
    # fit to the data and test for masker attributes
    multi_pca.fit(data)
    assert multi_pca.mask_img_ == mask_img
    assert multi_pca.mask_img_ == multi_pca.masker_.mask_img_

    # Test that the components are the same if we put twice the same data, and
    # that fit output is deterministic
    components1 = multi_pca.components_
    components2 = multi_pca.fit(data).components_
    components3 = multi_pca.fit(2 * data).components_
    np.testing.assert_array_equal(components1, components2)
    np.testing.assert_array_almost_equal(components1, components3)

    # Smoke test fit with 'confounds' argument
    confounds = [np.arange(10).reshape(5, 2)] * 8
    multi_pca.fit(data, confounds=confounds)

    # Smoke test that multi_pca also works with single subject data
    multi_pca.fit(data[0])

    # Check that asking for too few components raises a ValueError
    multi_pca = MultiPCA()
    pytest.raises(ValueError, multi_pca.fit, data[:2])

    # Test fit on data with the use of a masker
    masker = MultiNiftiMasker()
    multi_pca = MultiPCA(mask=masker, n_components=3)
    multi_pca.fit(data)
    assert multi_pca.mask_img_ == multi_pca.masker_.mask_img_

    # Smoke test the use of a masker and without CCA
    multi_pca = MultiPCA(mask=MultiNiftiMasker(mask_args=dict(opening=0)),
                         do_cca=False, n_components=3)
    multi_pca.fit(data[:2])

    # Smoke test the transform and inverse_transform
    multi_pca.inverse_transform(multi_pca.transform(data[-2:]))

    # Check that calling fit with no image raises an error
    pytest.raises(TypeError, multi_pca.fit)

    multi_pca = MultiPCA(mask=mask_img, n_components=3)
    with pytest.raises(ValueError,
                       match="Object has no components_ attribute. This is "
                             "probably because fit has not been called"):
        multi_pca.transform(data)
    # Test that an error is raised when an empty list is provided.
    with pytest.raises(ValueError,
                       match='Need one or more Niimg-like objects as input, '
                             'an empty list was given.'):
        multi_pca.fit([])
    # Test passing masker arguments to estimator
    multi_pca = MultiPCA(target_affine=affine,
                         target_shape=shape[:3],
                         n_components=3,
                         mask_strategy='background')
    multi_pca.fit(data)
def test_auto_mask():
    # This is mostly a smoke test
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    img = Nifti1Image(data, np.eye(4))
    masker = MultiNiftiMasker(mask_args=dict(opening=0))
    # Check that if we have not fit the masker we get an intelligible error
    pytest.raises(ValueError, masker.transform, [[img]])
    # Check error return due to bad data format
    pytest.raises(ValueError, masker.fit, img)
    # Smoke test the fit
    masker.fit([[img]])

    # Test the mask computed from several images (the union of both masks)
    data2 = np.zeros((9, 9, 9))
    data2[1:-3, 1:-3, 1:-3] = 10
    img2 = Nifti1Image(data2, np.eye(4))

    masker.fit([[img, img2]])
    assert_array_equal(get_data(masker.mask_img_), np.logical_or(data, data2))
    # Smoke test the transform
    masker.transform([[img]])
    # It should also work with a 3D image
    masker.transform(img)

    # check exception when transform() called without prior fit()
    masker2 = MultiNiftiMasker(mask_img=img)
    with pytest.raises(ValueError, match='has not been fitted. '):
        masker2.transform(img2)
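# Compact end-to-end sketch of the workflow exercised above (synthetic data):
# fit a mask, transform a list of 4D images into 2D arrays, and map one
# subject's signals back to image space with inverse_transform.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import MultiNiftiMasker

rng = np.random.RandomState(0)
demo_imgs = [Nifti1Image(rng.normal(size=(5, 5, 5, 4)) + 10, np.eye(4))
             for _ in range(2)]
demo_mask = Nifti1Image(np.ones((5, 5, 5), dtype=np.int8), np.eye(4))

demo_masker = MultiNiftiMasker(mask_img=demo_mask, standardize=True)
demo_signals = demo_masker.fit_transform(demo_imgs)
demo_img = demo_masker.inverse_transform(demo_signals[0])
# each signals array is expected to be (4, 125); demo_img back to (5, 5, 5, 4)
print(demo_signals[0].shape, demo_img.shape)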