def test_region_extractor_fit_and_transform():
    n_regions = 9
    n_subjects = 5
    maps, mask_img = generate_maps((40, 40, 40), n_regions=n_regions)

    # smoke test for RegionExtractor with thresholding_strategy='ratio_n_voxels'
    extract_ratio = RegionExtractor(maps, threshold=0.2,
                                    thresholding_strategy='ratio_n_voxels')
    extract_ratio.fit()
    assert_not_equal(extract_ratio.regions_img_, '')
    assert_true(extract_ratio.regions_img_.shape[-1] >= 9)

    # smoke test with thresholding_strategy='percentile' and a mask image
    extractor = RegionExtractor(maps, threshold=30,
                                thresholding_strategy='percentile',
                                mask_img=mask_img)
    extractor.fit()
    assert_true(isinstance(extractor.index_, np.ndarray))
    assert_not_equal(extractor.regions_img_, '')
    assert_true(extractor.regions_img_.shape[-1] >= 9)

    n_regions_extracted = extractor.regions_img_.shape[-1]
    shape = (91, 109, 91, 7)
    expected_signal_shape = (7, n_regions_extracted)
    for id_ in range(n_subjects):
        img, data = _make_random_data(shape)
        # smoke test the NiftiMapsMasker transform inherited by RegionExtractor
        signal = extractor.transform(img)
        assert_equal(expected_signal_shape, signal.shape)
Example #2
def test_region_extractor_fit_and_transform():
    n_regions = 9
    n_subjects = 5
    maps, mask_img = generate_maps((40, 40, 40), n_regions=n_regions)

    # smoke test for RegionExtractor with thresholding_strategy='ratio_n_voxels'
    extract_ratio = RegionExtractor(maps, threshold=0.2,
                                    thresholding_strategy='ratio_n_voxels')
    extract_ratio.fit()
    assert_not_equal(extract_ratio.regions_img_, '')
    assert_true(extract_ratio.regions_img_.shape[-1] >= 9)

    # smoke test with thresholding_strategy='percentile' and a mask image
    extractor = RegionExtractor(maps, threshold=30,
                                thresholding_strategy='percentile',
                                mask_img=mask_img)
    extractor.fit()
    assert_true(isinstance(extractor.index_, np.ndarray))
    assert_not_equal(extractor.regions_img_, '')
    assert_true(extractor.regions_img_.shape[-1] >= 9)

    n_regions_extracted = extractor.regions_img_.shape[-1]
    shape = (91, 109, 91, 7)
    expected_signal_shape = (7, n_regions_extracted)
    for id_ in range(n_subjects):
        img, data = _make_random_data(shape)
        # smoke test the NiftiMapsMasker transform inherited by RegionExtractor
        signal = extractor.transform(img)
        assert_equal(expected_signal_shape, signal.shape)

    # smoke test with high resolution image
    maps, mask_img = generate_maps((20, 20, 20), n_regions=n_regions,
                                   affine=.2 * np.eye(4))

    extract_ratio = RegionExtractor(maps,
                                    thresholding_strategy='ratio_n_voxels',
                                    smoothing_fwhm=.6,
                                    min_region_size=.4)
    extract_ratio.fit()
    assert_not_equal(extract_ratio.regions_img_, '')
    assert_true(extract_ratio.regions_img_.shape[-1] >= 9)

    # smoke test with zeros on the diagonal of the affine
    affine = np.eye(4)
    affine[[0, 1]] = affine[[1, 0]]  # swap the first and second rows
    maps, mask_img = generate_maps((40, 40, 40), n_regions=n_regions,
                                   affine=affine)

    extract_ratio = RegionExtractor(maps, threshold=0.2,
                                    thresholding_strategy='ratio_n_voxels')
    extract_ratio.fit()
    assert_not_equal(extract_ratio.regions_img_, '')
    assert_true(extract_ratio.regions_img_.shape[-1] >= 9)
Example #3
def test_region_extractor_fit_and_transform():
    n_regions = 9
    n_subjects = 5
    maps, mask_img = generate_maps((40, 40, 40), n_regions=n_regions)

    # Test that extracted regions are zero wherever the mask is zero
    mask_data = get_data(mask_img)
    mask_data[1, 1, 1] = 0
    extractor_without_mask = RegionExtractor(maps)
    extractor_without_mask.fit()
    extractor_with_mask = RegionExtractor(maps, mask_img=mask_img)
    extractor_with_mask.fit()
    assert not np.all(
        get_data(extractor_without_mask.regions_img_)[mask_data == 0] == 0.)
    assert np.all(
        get_data(extractor_with_mask.regions_img_)[mask_data == 0] == 0.)

    # smoke test for RegionExtractor with thresholding_strategy='ratio_n_voxels'
    extract_ratio = RegionExtractor(maps, threshold=0.2,
                                    thresholding_strategy='ratio_n_voxels')
    extract_ratio.fit()
    assert extract_ratio.regions_img_ != ''
    assert extract_ratio.regions_img_.shape[-1] >= 9

    # smoke test with thresholding_strategy='percentile' and a mask image
    extractor = RegionExtractor(maps, threshold=30,
                                thresholding_strategy='percentile',
                                mask_img=mask_img)
    extractor.fit()
    assert isinstance(extractor.index_, np.ndarray)
    assert extractor.regions_img_ != ''
    assert extractor.regions_img_.shape[-1] >= 9

    n_regions_extracted = extractor.regions_img_.shape[-1]
    shape = (91, 109, 91, 7)
    expected_signal_shape = (7, n_regions_extracted)
    for id_ in range(n_subjects):
        img, data = _make_random_data(shape)
        # smoke test the NiftiMapsMasker transform inherited by RegionExtractor
        signal = extractor.transform(img)
        assert expected_signal_shape == signal.shape

    # smoke test with high resolution image
    maps, mask_img = generate_maps((20, 20, 20), n_regions=n_regions,
                                   affine=.2 * np.eye(4))

    extract_ratio = RegionExtractor(maps,
                                    thresholding_strategy='ratio_n_voxels',
                                    smoothing_fwhm=.6,
                                    min_region_size=.4)
    extract_ratio.fit()
    assert extract_ratio.regions_img_ != ''
    assert extract_ratio.regions_img_.shape[-1] >= 9

    # smoke test with zeros on the diagonal of the affine
    affine = np.eye(4)
    affine[[0, 1]] = affine[[1, 0]]  # swap the first and second rows
    maps, mask_img = generate_maps((40, 40, 40), n_regions=n_regions,
                                   affine=affine)

    extract_ratio = RegionExtractor(maps, threshold=0.2,
                                    thresholding_strategy='ratio_n_voxels')
    extract_ratio.fit()
    assert extract_ratio.regions_img_ != ''
    assert extract_ratio.regions_img_.shape[-1] >= 9
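
# The tests above exercise the RegionExtractor API end to end. As a quick
# reference, here is a minimal usage sketch; maps_img (a 4D probabilistic
# atlas) and func_img (a 4D functional image) are placeholder inputs, not
# objects defined in these examples.
from nilearn.regions import RegionExtractor

region_extractor = RegionExtractor(maps_img, threshold=0.5,
                                   thresholding_strategy='ratio_n_voxels',
                                   extractor='local_regions',
                                   standardize=True)
region_extractor.fit()
# 4D image holding one connected region per volume
regions_img = region_extractor.regions_img_
# index_[i] tells which input map each extracted region came from
region_index = region_extractor.index_
# (n_timepoints, n_regions) signals for one functional run
signals = region_extractor.transform(func_img)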
################################################################################
# Computing correlation coefficients and plotting a connectome
# -------------------------------------------------------------

# First we extract timeseries signals from each subject's functional data and
# then estimate correlation matrices on those signals.
# To extract the timeseries signals, we call transform() on the RegionExtractor
# object for each subject's functional data stored in func_filenames.
# To estimate the correlation matrices, we import connectome utilities from
# nilearn.
from nilearn.connectome import ConnectivityMeasure

correlations = []
# Initialize the ConnectivityMeasure object with kind='correlation'
connectome_measure = ConnectivityMeasure(kind='correlation')
for filename, confound in zip(func_filenames, confounds):
    # call transform() on the RegionExtractor to extract timeseries signals
    timeseries_each_subject = extractor.transform(filename, confounds=confound)
    # call fit_transform() on the ConnectivityMeasure object
    correlation = connectome_measure.fit_transform([timeseries_each_subject])
    # save each subject's correlation matrix
    correlations.append(correlation)

# Mean of all correlations
import numpy as np

mean_correlations = np.mean(correlations,
                            axis=0).reshape(n_regions_extracted,
                                            n_regions_extracted)

###############################################################################
# Visualization: plotting connectome results
# -------------------------------------------
import matplotlib.pyplot as plt

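# A minimal sketch of how the mean correlation matrix could be displayed with
# matplotlib, reusing mean_correlations and n_regions_extracted from above.
fig, ax = plt.subplots(figsize=(6, 5))
im = ax.imshow(mean_correlations, vmin=-1., vmax=1., cmap='RdBu_r',
               interpolation='nearest')
ax.set_title('Mean correlation over %d regions' % n_regions_extracted)
fig.colorbar(im, ax=ax)
plt.show()
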
# First we extract timeseries signals from each subject's functional data and
# then estimate correlation matrices on those signals.
# To extract the timeseries signals, we call transform() on the RegionExtractor
# object for each subject's functional data stored in func.
# To estimate the correlation matrices, we import connectome utilities from
# nilearn.
from nilearn.connectome import ConnectivityMeasure
import numpy as np

time_series = []
correlations = []
# Initialize the ConnectivityMeasure object with kind='correlation'
connectome_measure = ConnectivityMeasure(kind='correlation')
for bold in func:
    # call transform() on the RegionExtractor to extract timeseries signals
    timeseries_each_subject = extractor.transform(bold)

    # append this subject's time series
    time_series.append(timeseries_each_subject)

    # call fit_transform() on the ConnectivityMeasure object
    correlation = connectome_measure.fit_transform([timeseries_each_subject])
    # save each subject's correlation matrix
    correlations.append(
        np.reshape(correlation, (n_regions_extracted, n_regions_extracted)))

# Mean of all correlations
mean_correlations = np.mean(correlations,
                            axis=0).reshape(n_regions_extracted,
                                            n_regions_extracted)
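
# As an illustrative alternative, the collected time_series list can be passed
# to ConnectivityMeasure in a single call, giving one matrix per subject.
all_correlations = connectome_measure.fit_transform(time_series)
# shape: (n_subjects, n_regions_extracted, n_regions_extracted)
mean_correlations_alt = all_correlations.mean(axis=0)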
# Compute correlation coefficients
# ---------------------------------

# First we extract timeseries signals from each subject's functional data and
# then estimate correlation matrices on those signals.
# To extract the timeseries signals, we call transform() on the RegionExtractor
# object for each subject's functional data stored in func_filenames.
# To estimate the correlation matrices, we import connectome utilities from
# nilearn.
from nilearn.connectome import ConnectivityMeasure

correlations = []
# Initialize the ConnectivityMeasure object with kind='correlation'
connectome_measure = ConnectivityMeasure(kind='correlation')
for filename, confound in zip(func_filenames, confounds):
    # call transform() on the RegionExtractor to extract timeseries signals
    timeseries_each_subject = extractor.transform(filename, confounds=confound)
    # call fit_transform() on the ConnectivityMeasure object
    correlation = connectome_measure.fit_transform([timeseries_each_subject])
    # save each subject's correlation matrix
    correlations.append(correlation)

# Mean of all correlations
import numpy as np
mean_correlations = np.mean(correlations, axis=0).reshape(n_regions_extracted,
                                                          n_regions_extracted)

###############################################################################
# Plot resulting connectomes
# ----------------------------

title = 'Correlation between %d regions' % n_regions_extracted
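
# A minimal plotting sketch using nilearn's plotting utilities; the region
# coordinates are derived from the extracted regions image of the fitted
# extractor defined above.
from nilearn import plotting

# find the center coordinate of each extracted region
coords_connectome = plotting.find_probabilistic_atlas_cut_coords(
    extractor.regions_img_)
# plot the mean correlation matrix as a connectome, keeping the strongest edges
plotting.plot_connectome(mean_correlations, coords_connectome,
                         edge_threshold='90%', title=title)
plotting.show()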
###############################################################################
# Compute correlation coefficients
# ---------------------------------

# First we extract timeseries signals from each subject's functional data and
# then estimate correlation matrices on those signals.
# To extract the timeseries signals, we call transform() on the RegionExtractor
# object for each subject's functional data stored in func.
# To estimate the correlation matrices, we import connectome utilities from
# nilearn.
from nilearn.connectome import ConnectivityMeasure

time_series = []
correlations = []
# Initialize the ConnectivityMeasure object with kind='correlation'
connectome_measure = ConnectivityMeasure(kind='correlation')
for filename in func:
    # call transform() on the RegionExtractor to extract timeseries signals
    timeseries_each_subject = extractor.transform(filename)

    # append this subject's time series
    time_series.append(timeseries_each_subject)

    # call fit_transform() on the ConnectivityMeasure object
    correlation = connectome_measure.fit_transform([timeseries_each_subject])
    # save each subject's correlation matrix
    correlations.append(correlation)

# Mean of all correlations
import numpy as np

mean_correlations = np.mean(correlations,
                            axis=0).reshape(n_regions_extracted,
                                            n_regions_extracted)