def test_nifti_labels_masker_with_nans_and_infs_in_data():
    """Apply a NiftiLabelsMasker to 4D data containing NaNs and infs.

    The masker should replace those NaNs and infs with zeros,
    while raising a warning.
    """
    length = 3
    n_regions = 9
    fmri_img, mask_img = generate_random_img((13, 11, 12),
                                             affine=np.eye(4),
                                             length=length)
    labels_img = data_gen.generate_labeled_regions((13, 11, 12),
                                                   affine=np.eye(4),
                                                   n_regions=n_regions)
    # Introduce NaNs and infs (the data must be of float type)
    # See issues:
    # - https://github.com/nilearn/nilearn/issues/2580 (why floats)
    # - https://github.com/nilearn/nilearn/issues/2711 (why test)
    fmri_data = get_data(fmri_img).astype(np.float32)
    fmri_data[:, :, 7, :] = np.nan
    fmri_data[:, :, 4, 0] = np.inf
    fmri_img = nibabel.Nifti1Image(fmri_data, np.eye(4))

    masker = NiftiLabelsMasker(labels_img, mask_img=mask_img)

    with pytest.warns(UserWarning, match="Non-finite values detected."):
        sig = masker.fit_transform(fmri_img)

    assert sig.shape == (length, n_regions)
    assert np.all(np.isfinite(sig))
def test_standardization():
    rng = np.random.RandomState(42)
    data_shape = (9, 9, 5)
    n_samples = 500

    signals = rng.standard_normal(size=(np.prod(data_shape), n_samples))
    means = rng.standard_normal(size=(np.prod(data_shape), 1)) * 50 + 1000
    signals += means
    img = nibabel.Nifti1Image(signals.reshape(data_shape + (n_samples, )),
                              np.eye(4))

    labels = data_gen.generate_labeled_regions((9, 9, 5), 10)

    # Unstandardized
    masker = NiftiLabelsMasker(labels, standardize=False)
    unstandardized_label_signals = masker.fit_transform(img)

    # z-score
    masker = NiftiLabelsMasker(labels, standardize='zscore')
    trans_signals = masker.fit_transform(img)

    np.testing.assert_almost_equal(trans_signals.mean(0), 0)
    np.testing.assert_almost_equal(trans_signals.std(0), 1)

    # psc
    masker = NiftiLabelsMasker(labels, standardize='psc')
    trans_signals = masker.fit_transform(img)

    np.testing.assert_almost_equal(trans_signals.mean(0), 0)
    np.testing.assert_almost_equal(
        trans_signals, (unstandardized_label_signals /
                        unstandardized_label_signals.mean(0) * 100 - 100))
def test_3d_images():
    # Test that the NiftiLabelsMasker works with 3D images
    affine = np.eye(4)
    n_regions = 3
    shape3 = (2, 2, 2)

    labels33_img = data_gen.generate_labeled_regions(shape3, n_regions)
    mask_img = nibabel.Nifti1Image(np.ones(shape3, dtype=np.int8),
                                   affine=affine)
    epi_img1 = nibabel.Nifti1Image(np.ones(shape3), affine=affine)
    epi_img2 = nibabel.Nifti1Image(np.ones(shape3), affine=affine)
    masker = NiftiLabelsMasker(labels33_img, mask_img=mask_img)

    epis = masker.fit_transform(epi_img1)
    assert (epis.shape == (1, 3))
    epis = masker.fit_transform([epi_img1, epi_img2])
    assert (epis.shape == (2, 3))
# Example #4
    def transform(self, imgs, confounds=None):
        """Extract signals from :term:`parcellations<parcellation>` learned
        on :term:`fMRI` images.

        Parameters
        ----------
        %(imgs)s
            Images to process.

        confounds : :obj:`list` of CSV files, array-like,\
 or :class:`pandas.DataFrame`, optional
            Each file or numpy array in the list should have shape
            (number of scans, number of confounds).
            Must be of the same length as imgs.

            .. note::
                This parameter is passed to :func:`nilearn.signal.clean`.
                Please see the related documentation for details.

        Returns
        -------
        region_signals : :obj:`list` of 2D :class:`numpy.ndarray`,\
 or a single 2D :class:`numpy.ndarray`
            Signals extracted for each label of each image.
            For example, for a single image the shape will be
            (number of scans, number of labels).

        """
        self._check_fitted()
        imgs, confounds, single_subject = _check_parameters_transform(
            imgs, confounds)
        # Required for special cases like extracting signals from a list of
        # 3D images
        imgs_list = _iter_check_niimg(imgs, atleast_4d=True)

        masker = NiftiLabelsMasker(self.labels_img_,
                                   mask_img=self.masker_.mask_img_,
                                   smoothing_fwhm=self.smoothing_fwhm,
                                   standardize=self.standardize,
                                   detrend=self.detrend,
                                   low_pass=self.low_pass,
                                   high_pass=self.high_pass, t_r=self.t_r,
                                   resampling_target='data',
                                   memory=self.memory,
                                   memory_level=self.memory_level,
                                   verbose=self.verbose)

        region_signals = Parallel(n_jobs=self.n_jobs)(
            delayed(self._cache(_labels_masker_extraction,
                                func_memory_level=2))
            (img, masker, confound)
            for img, confound in zip(imgs_list, confounds))

        if single_subject:
            return region_signals[0]
        else:
            return region_signals
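# A minimal usage sketch for the transform method above. It assumes the
# method belongs to a fitted nilearn.regions.Parcellations estimator;
# `func_imgs` and `confounds_list` are hypothetical lists of 4D functional
# images and matching confound arrays.
from nilearn.regions import Parcellations

parcellations = Parcellations(method='kmeans', n_parcels=50)
parcellations.fit(func_imgs)
# One (n_scans, n_parcels) array per input image; passing a single image
# instead of a list would return a single 2D array.
signals = parcellations.transform(func_imgs, confounds=confounds_list)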
def test_nifti_labels_masker_reduction_strategies():
    """Tests:
    1. whether the usage of different reduction strategies work.
    2. whether unrecognised strategies raise a ValueError
    3. whether the default option is backwards compatible (calls "mean")
    """
    test_values = [-2., -1., 0., 1., 2]

    img_data = np.array([[test_values, test_values]])

    labels_data = np.array([[[0, 0, 0, 0, 0], [1, 1, 1, 1, 1]]], dtype=np.int8)

    affine = np.eye(4)
    img = nibabel.Nifti1Image(img_data, affine)
    labels = nibabel.Nifti1Image(labels_data, affine)

    # Expected output of NiftiLabelsMasker for each reduction strategy
    expected_results = {
        "mean": np.mean(test_values),
        "median": np.median(test_values),
        "sum": np.sum(test_values),
        "minimum": np.min(test_values),
        "maximum": np.max(test_values),
        "standard_deviation": np.std(test_values),
        "variance": np.var(test_values)
    }

    for strategy, expected_result in expected_results.items():
        masker = NiftiLabelsMasker(labels, strategy=strategy)
        # Here we pass [img] as a list because it is a 3D image.
        result = masker.fit_transform([img]).squeeze()
        assert result == expected_result

    with pytest.raises(ValueError, match="Invalid strategy 'TESTRAISE'"):
        NiftiLabelsMasker(labels, strategy="TESTRAISE")

    default_masker = NiftiLabelsMasker(labels)
    assert default_masker.strategy == "mean"
def test_nifti_labels_masker_with_mask():
    shape = (13, 11, 12)
    affine = np.eye(4)
    fmri_img, mask_img = generate_random_img(shape, affine=affine, length=3)
    labels_img = data_gen.generate_labeled_regions(shape,
                                                   affine=affine,
                                                   n_regions=7)
    masker = NiftiLabelsMasker(labels_img,
                               resampling_target=None,
                               mask_img=mask_img)
    signals = masker.fit().transform(fmri_img)
    bg_masker = NiftiMasker(mask_img).fit()
    masked_labels = bg_masker.inverse_transform(
        bg_masker.transform(labels_img))
    masked_masker = NiftiLabelsMasker(masked_labels,
                                      resampling_target=None,
                                      mask_img=mask_img)
    masked_signals = masked_masker.fit().transform(fmri_img)
    assert np.allclose(signals, masked_signals)
# Example #7
def test_img_to_signals_labels_non_float_type(target_dtype):
    fake_fmri_data = (np.random.RandomState(42).uniform(size=(10, 10, 10, 10))
                      > 0.5)
    fake_affine = np.eye(4, 4).astype(np.float64)
    fake_fmri_img_orig = nibabel.Nifti1Image(
        fake_fmri_data.astype(np.float64),
        fake_affine,
    )
    fake_fmri_img_target_dtype = new_img_like(
        fake_fmri_img_orig, fake_fmri_data.astype(target_dtype))
    fake_mask_data = np.ones((10, 10, 10), dtype=np.uint8)
    fake_mask = nibabel.Nifti1Image(fake_mask_data, fake_affine)

    masker = NiftiLabelsMasker(fake_mask)
    masker.fit()
    timeseries_int = masker.transform(fake_fmri_img_target_dtype)
    timeseries_float = masker.transform(fake_fmri_img_orig)
    assert np.sum(timeseries_int) != 0
    assert np.allclose(timeseries_int, timeseries_float)
# Example #8
##########################################################################
# Extract coordinates on Yeo atlas - parcellations
# ------------------------------------------------
from nilearn.maskers import NiftiLabelsMasker
from nilearn.connectome import ConnectivityMeasure

# ConnectivityMeasure from Nilearn uses a simple 'correlation' to compute
# connectivity matrices for all subjects in a list
connectome_measure = ConnectivityMeasure(kind='correlation')

# useful for plotting connectivity interactions on glass brain
from nilearn import plotting

# create masker to extract functional data within atlas parcels
masker = NiftiLabelsMasker(labels_img=yeo['thick_17'],
                           standardize=True,
                           memory='nilearn_cache')

# extract time series from all subjects and concatenate them
time_series = []
for func, confounds in zip(data.func, data.confounds):
    time_series.append(masker.fit_transform(func, confounds=confounds))

# calculate correlation matrices across subjects and display
correlation_matrices = connectome_measure.fit_transform(time_series)

# The mean correlation matrix across the 10 subjects can be grabbed like
# this, using the connectome measure object
mean_correlation_matrix = connectome_measure.mean_

# grab center coordinates for atlas labels
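# A plausible completion of the step above (a sketch, assuming the same Yeo
# atlas image passed to the masker): find_parcellation_cut_coords returns
# one (x, y, z) coordinate per parcel.
coordinates = plotting.find_parcellation_cut_coords(labels_img=yeo['thick_17'])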
# Example #9
print('Atlas ROIs are located in nifti image (4D) at: %s' %
      atlas_filename)  # 4D data

# One subject of brain development fmri data
data = datasets.fetch_development_fmri(n_subjects=1, reduce_confounds=True)
fmri_filenames = data.func[0]
reduced_confounds = data.confounds[0]  # This is a preselected set of confounds

##############################################################################
# Extract signals on a parcellation defined by labels
# ---------------------------------------------------
# Using the NiftiLabelsMasker
from nilearn.maskers import NiftiLabelsMasker
masker = NiftiLabelsMasker(labels_img=atlas_filename,
                           standardize=True,
                           memory='nilearn_cache',
                           verbose=5)

# Here we go from nifti files to the signal time series in a numpy
# array. Note how we give confounds to be regressed out during signal
# extraction
time_series = masker.fit_transform(fmri_filenames, confounds=reduced_confounds)

##############################################################################
# Compute and display a correlation matrix
# ----------------------------------------
from nilearn.connectome import ConnectivityMeasure
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]

# Plot the correlation matrix
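# A minimal sketch of one way to plot it (this block is an assumption, not
# part of the original snippet): mask the diagonal for display and use
# nilearn's plot_matrix helper.
import numpy as np

from nilearn import plotting

np.fill_diagonal(correlation_matrix, 0)
plotting.plot_matrix(correlation_matrix, colorbar=True, vmax=0.8, vmin=-0.8)
plotting.show()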
def test_nifti_labels_masker():
    # Check working of shape/affine checks
    shape1 = (13, 11, 12)
    affine1 = np.eye(4)

    shape2 = (12, 10, 14)
    affine2 = np.diag((1, 2, 3, 1))

    n_regions = 9
    length = 3

    fmri11_img, mask11_img = generate_random_img(shape1,
                                                 affine=affine1,
                                                 length=length)
    fmri12_img, mask12_img = generate_random_img(shape1,
                                                 affine=affine2,
                                                 length=length)
    fmri21_img, mask21_img = generate_random_img(shape2,
                                                 affine=affine1,
                                                 length=length)

    labels11_img = data_gen.generate_labeled_regions(shape1,
                                                     affine=affine1,
                                                     n_regions=n_regions)

    mask_img_4d = nibabel.Nifti1Image(np.ones((2, 2, 2, 2), dtype=np.int8),
                                      affine=np.diag((4, 4, 4, 1)))

    # verify that 4D mask arguments are refused
    masker = NiftiLabelsMasker(labels11_img, mask_img=mask_img_4d)
    with pytest.raises(DimensionError,
                       match="Input data has incompatible dimensionality: "
                       "Expected dimension is 3D and you provided "
                       "a 4D image."):
        masker.fit()

    # check exception when transform() called without prior fit()
    masker11 = NiftiLabelsMasker(labels11_img, resampling_target=None)
    with pytest.raises(ValueError, match='has not been fitted. '):
        masker11.transform(fmri11_img)

    # No exception raised here
    signals11 = masker11.fit().transform(fmri11_img)
    assert signals11.shape == (length, n_regions)

    # No exception should be raised either
    masker11 = NiftiLabelsMasker(labels11_img, resampling_target=None)
    masker11.fit()
    masker11.inverse_transform(signals11)

    masker11 = NiftiLabelsMasker(labels11_img,
                                 mask_img=mask11_img,
                                 resampling_target=None)
    signals11 = masker11.fit().transform(fmri11_img)
    assert signals11.shape == (length, n_regions)

    # Test all kinds of mismatch between shapes and between affines
    masker11 = NiftiLabelsMasker(labels11_img, resampling_target=None)
    masker11.fit()
    pytest.raises(ValueError, masker11.transform, fmri12_img)
    pytest.raises(ValueError, masker11.transform, fmri21_img)

    masker11 = NiftiLabelsMasker(labels11_img,
                                 mask_img=mask12_img,
                                 resampling_target=None)
    pytest.raises(ValueError, masker11.fit)

    masker11 = NiftiLabelsMasker(labels11_img,
                                 mask_img=mask21_img,
                                 resampling_target=None)
    pytest.raises(ValueError, masker11.fit)

    # Transform, with smoothing (smoke test)
    masker11 = NiftiLabelsMasker(labels11_img,
                                 smoothing_fwhm=3,
                                 resampling_target=None)
    signals11 = masker11.fit().transform(fmri11_img)
    assert signals11.shape == (length, n_regions)

    masker11 = NiftiLabelsMasker(labels11_img,
                                 smoothing_fwhm=3,
                                 resampling_target=None)
    signals11 = masker11.fit_transform(fmri11_img)
    assert signals11.shape == (length, n_regions)

    with pytest.raises(ValueError, match='has not been fitted. '):
        NiftiLabelsMasker(labels11_img).inverse_transform(signals11)

    # Call inverse transform (smoke test)
    fmri11_img_r = masker11.inverse_transform(signals11)
    assert fmri11_img_r.shape == fmri11_img.shape
    np.testing.assert_almost_equal(fmri11_img_r.affine, fmri11_img.affine)
def test_nifti_labels_masker_resampling():
    # Test resampling in NiftiLabelsMasker
    shape1 = (10, 11, 12)
    affine = np.eye(4)

    # mask
    shape2 = (16, 17, 18)

    # labels
    shape3 = (13, 14, 15)

    n_regions = 9
    length = 3

    # With data of the same affine
    fmri11_img, _ = generate_random_img(shape1, affine=affine, length=length)
    _, mask22_img = generate_random_img(shape2, affine=affine, length=length)

    labels33_img = data_gen.generate_labeled_regions(shape3,
                                                     n_regions,
                                                     affine=affine)

    # Test error checking
    pytest.raises(ValueError,
                  NiftiLabelsMasker,
                  labels33_img,
                  resampling_target="mask")
    pytest.raises(ValueError,
                  NiftiLabelsMasker,
                  labels33_img,
                  resampling_target="invalid")

    # Target: labels
    masker = NiftiLabelsMasker(labels33_img,
                               mask_img=mask22_img,
                               resampling_target="labels")

    masker.fit()
    np.testing.assert_almost_equal(masker.labels_img_.affine,
                                   labels33_img.affine)
    assert masker.labels_img_.shape == labels33_img.shape

    np.testing.assert_almost_equal(masker.mask_img_.affine,
                                   masker.labels_img_.affine)
    assert masker.mask_img_.shape == masker.labels_img_.shape[:3]

    transformed = masker.transform(fmri11_img)
    assert transformed.shape == (length, n_regions)

    fmri11_img_r = masker.inverse_transform(transformed)
    np.testing.assert_almost_equal(fmri11_img_r.affine,
                                   masker.labels_img_.affine)
    assert (fmri11_img_r.shape == (masker.labels_img_.shape[:3] + (length, )))

    # Test with clipped labels: mask does not contain all labels.
    # Shapes do matter in that case, because there is some resampling
    # taking place.
    shape1 = (10, 11, 12)  # fmri
    shape2 = (8, 9, 10)  # mask
    shape3 = (16, 18, 20)  # maps

    n_regions = 9
    length = 21

    fmri11_img, _ = generate_random_img(shape1, affine=affine, length=length)
    _, mask22_img = generate_random_img(shape2, affine=affine, length=length)

    # Target: labels
    labels33_img = data_gen.generate_labeled_regions(shape3,
                                                     n_regions,
                                                     affine=affine)

    masker = NiftiLabelsMasker(labels33_img,
                               mask_img=mask22_img,
                               resampling_target="labels")

    masker.fit()
    np.testing.assert_almost_equal(masker.labels_img_.affine,
                                   labels33_img.affine)
    assert masker.labels_img_.shape == labels33_img.shape

    np.testing.assert_almost_equal(masker.mask_img_.affine,
                                   masker.labels_img_.affine)
    assert masker.mask_img_.shape == masker.labels_img_.shape[:3]

    uniq_labels = np.unique(get_data(masker.labels_img_))
    assert uniq_labels[0] == 0
    assert len(uniq_labels) - 1 == n_regions

    transformed = masker.transform(fmri11_img)
    assert transformed.shape == (length, n_regions)
    # Some regions have been clipped. Resulting signal must be zero
    assert (transformed.var(axis=0) == 0).sum() < n_regions

    fmri11_img_r = masker.inverse_transform(transformed)
    np.testing.assert_almost_equal(fmri11_img_r.affine,
                                   masker.labels_img_.affine)
    assert (fmri11_img_r.shape == (masker.labels_img_.shape[:3] + (length, )))

    # Test with data and atlas of different shape: the atlas should be
    # resampled to the data
    shape22 = (5, 5, 6)
    affine2 = 2 * np.eye(4)
    affine2[-1, -1] = 1

    fmri22_img, _ = generate_random_img(shape22, affine=affine2, length=length)
    masker = NiftiLabelsMasker(labels33_img, mask_img=mask22_img)

    masker.fit_transform(fmri22_img)
    np.testing.assert_array_equal(masker._resampled_labels_img_.affine,
                                  affine2)

    # Test with filenames
    with testing.write_tmp_imgs(fmri22_img) as filename:
        masker = NiftiLabelsMasker(labels33_img, resampling_target='data')
        masker.fit_transform(filename)

    # Test the labels masker with resampling_target in ('data', 'labels'):
    # the resampled labels should have a number of labels equal to the size
    # of the transformed signal along its 2nd dimension.
    # These tests were added for Nilearn issue #1673.
    shape = (13, 11, 12)
    affine = np.eye(4) * 2

    fmri_img, _ = generate_random_img(shape, affine=affine, length=21)
    labels_img = data_gen.generate_labeled_regions((9, 8, 6),
                                                   affine=np.eye(4),
                                                   n_regions=10)
    for resampling_target in ['data', 'labels']:
        masker = NiftiLabelsMasker(labels_img=labels_img,
                                   resampling_target=resampling_target)
        if resampling_target == 'data':
            with pytest.warns(UserWarning,
                              match=("After resampling the label image "
                                     "to the data image, the following "
                                     "labels were removed")):
                transformed = masker.fit_transform(fmri_img)
        else:
            transformed = masker.fit_transform(fmri_img)
        resampled_labels_img = masker._resampled_labels_img_
        n_resampled_labels = len(np.unique(get_data(resampled_labels_img)))
        assert n_resampled_labels - 1 == transformed.shape[1]
        # inverse transform
        compressed_img = masker.inverse_transform(transformed)

        # Compressing the image a second time should yield an image with
        # the same data as compressed_img.
        transformed2 = masker.fit_transform(fmri_img)
        # inverse transform again
        compressed_img2 = masker.inverse_transform(transformed2)
        np.testing.assert_array_equal(get_data(compressed_img),
                                      get_data(compressed_img2))
# Load an atlas
#
# We then load the Harvard-Oxford atlas to define the brain regions
atlas = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')

# The first label corresponds to the background
print('The atlas contains {} non-overlapping regions'.format(
    len(atlas.labels) - 1))

###########################################################################
# Instantiate the masker and visualize the atlas
#
from nilearn.maskers import NiftiLabelsMasker

# Instantiate the masker with label image and label values
masker = NiftiLabelsMasker(atlas.maps, labels=atlas.labels, standardize=True)

# Visualize the atlas
# Note that we need to call fit prior to generating the report
masker.fit()

# At this point, no functional image has been provided to the masker.
# We can still generate a report which can be displayed in a Jupyter
# Notebook, opened in a browser using the .open_in_browser() method,
# or saved to a file using the .save_as_html(output_filepath) method.
report = masker.generate_report()
report
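# For instance (a sketch; the output file name is hypothetical), the report
# can also be saved to disk, or opened outside the notebook:
report.save_as_html('nifti_labels_masker_report.html')
# report.open_in_browser()  # opens the same report in a web browser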

##########################################################################
# Fitting the masker on a functional image and generating a report
masker.fit(func_filename)
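# The report generation call itself is not shown in this snippet; a plausible
# continuation, mirroring the earlier call, would be:
report = masker.generate_report()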
##############################################################################
# Use the new ROIs to extract data maps in both ROIs

# We extract data from ROIs using nilearn's NiftiLabelsMasker
from nilearn.maskers import NiftiLabelsMasker

# Before data extraction, we convert the array of labels to a Nifti-like
# image. All inputs to NiftiLabelsMasker must be Nifti-like images or
# filenames to Nifti images. We use the same reference image as in the
# previous sections.
labels_img = new_img_like(fmri_img, labels)
# First, initialize the masker with parameters suited for data extraction
# using labels as the input image. resampling_target is None because the
# affine and shape are the same for all the data used here; the time series
# signal processing parameters standardize and detrend are set to False.
masker = NiftiLabelsMasker(labels_img,
                           resampling_target=None,
                           standardize=False,
                           detrend=False)
# After initializing the masker object, we call fit() to prepare the
# labels_img data according to the given parameters
masker.fit()
# Prepare for data extraction: set the number of conditions, size, etc. from
# the Haxby dataset
condition_names = haxby_labels.unique()
n_cond_img = fmri_data[..., haxby_labels == 'house'].shape[-1]
n_conds = len(condition_names)

X1, X2 = np.zeros((n_cond_img, n_conds)), np.zeros((n_cond_img, n_conds))
# Gather the data for each condition, then apply the masker's transform() to
# each. The transformer extracts data from the condition maps within the
# target regions specified by the labels image
for i, cond in enumerate(condition_names):
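    # The loop body is not included in this snippet; a plausible sketch,
    # assuming `fmri_img`, `fmri_data` and `haxby_labels` come from the
    # earlier setup: build a 4D image per condition and extract its signals.
    cond_maps = new_img_like(
        fmri_img, fmri_data[..., haxby_labels == cond][..., :n_cond_img])
    mask_data = masker.transform(cond_maps)
    X1[:, i], X2[:, i] = mask_data[:, 0], mask_data[:, 1]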
# Example #14
def test_nifti_labels_masker_report(data_img_3d, mask):
    shape = (13, 11, 12)
    affine = np.diag([2, 2, 2, 1])
    n_regions = 9
    labels = (['background']
              + ['region_{}'.format(i) for i in range(1, n_regions + 1)])
    EXPECTED_COLUMNS = [
        'label value', 'region name', 'size (in mm^3)', 'relative size (in %)'
    ]
    labels_img = generate_labeled_regions(shape,
                                          affine=affine,
                                          n_regions=n_regions)
    labels_img_floats = new_img_like(labels_img,
                                     get_data(labels_img).astype(float))
    masker = NiftiLabelsMasker(labels_img_floats, labels=labels)
    masker.fit()
    masker.generate_report()

    # Check that providing incorrect labels raises an error
    masker = NiftiLabelsMasker(labels_img, labels=labels[:-1])
    masker.fit()
    with pytest.raises(ValueError,
                       match="Mismatch between the number of provided labels"):
        masker.generate_report()
    masker = NiftiLabelsMasker(labels_img, labels=labels)
    masker.fit()
    # Check that a warning is given when generating the report
    # since no image was provided to fit
    with pytest.warns(UserWarning,
                      match="No image provided to fit in NiftiLabelsMasker"):
        masker.generate_report()

    # No image was provided to fit; regions are plotted using
    # plot_roi, so no contour should appear in the image
    display = masker._reporting()
    for d in ['x', 'y', 'z']:
        assert len(display[0].axes[d].ax.collections) == 0

    masker = NiftiLabelsMasker(labels_img, labels=labels)
    masker.fit(data_img_3d)

    display = masker._reporting()
    for d in ['x', 'y', 'z']:
        assert len(display[0].axes[d].ax.collections) > 0
        assert len(display[0].axes[d].ax.collections) <= n_regions

    masker = NiftiLabelsMasker(labels_img, labels=labels, mask_img=mask)
    masker.fit(data_img_3d)
    report = masker.generate_report()
    assert masker._reporting_data is not None
    # Check that background label was left as default
    assert masker.background_label == 0
    assert masker._report_content['description'] == (
        'This reports shows the regions defined by the labels of the mask.')
    # Check that the number of regions is correct
    assert masker._report_content['number_of_regions'] == n_regions
    # Check that all expected columns are present with the right size
    for col in EXPECTED_COLUMNS:
        assert col in masker._report_content['summary']
        assert len(masker._report_content['summary'][col]) == n_regions
    # Check that labels match
    assert masker._report_content['summary']['region name'] == labels[1:]
    # Relative sizes of regions should sum to 100%
    assert_almost_equal(
        sum(masker._report_content['summary']['relative size (in %)']), 100)
    _check_html(report)
    assert "Regions summary" in str(report)
    # Check region sizes calculations
    expected_region_sizes = Counter(get_data(labels_img).ravel())
    for r in range(1, n_regions + 1):
        assert_almost_equal(
            masker._report_content['summary']['size (in mm^3)'][r - 1],
            expected_region_sizes[r] * np.abs(np.linalg.det(affine[:3, :3])))

    # Check that region labels are not displayed in the report
    # when they were not provided by the user.
    masker = NiftiLabelsMasker(labels_img)
    masker.fit()
    report = masker.generate_report()
    for col in EXPECTED_COLUMNS:
        if col == "region name":
            assert col not in masker._report_content["summary"]
        else:
            assert col in masker._report_content["summary"]
            assert len(masker._report_content['summary'][col]) == n_regions