def test_nifti_spheres_masker_overlap():
    """Overlapping spheres are accepted only when allow_overlap=True."""
    # 5x5x5x5 random image on an identity affine.
    rng = np.random.RandomState(42)
    affine = np.eye(4)
    shape = (5, 5, 5)
    fmri_img = nibabel.Nifti1Image(rng.random_sample(shape + (5, )), affine)
    seeds = [(0, 0, 0), (2, 2, 2)]

    # With allow_overlap=True, both a small and a large radius work.
    for radius in (1, 2):
        masker = NiftiSpheresMasker(seeds, radius=radius, allow_overlap=True)
        masker.fit_transform(fmri_img)

    # With allow_overlap=False, non-overlapping spheres still pass ...
    strict_masker = NiftiSpheresMasker(seeds, radius=1, allow_overlap=False)
    strict_masker.fit_transform(fmri_img)

    # ... but spheres that touch must raise.
    strict_masker = NiftiSpheresMasker(seeds, radius=2, allow_overlap=False)
    with pytest.raises(ValueError, match='Overlap detected'):
        strict_masker.fit_transform(fmri_img)
def test_small_radius_inverse():
    """Check inverse_transform of a tiny sphere against the mask.

    A 0.1 mm sphere around the seed covers at most one voxel; moving the
    mask away from that voxel must make the sphere empty, and enlarging
    the radius must make it valid again.
    """
    shape = (3, 3, 3)
    data = np.random.RandomState(42).random_sample(shape)
    mask = np.zeros(shape)
    mask[1, 1, 1] = 1
    mask[2, 2, 2] = 1
    # Non-unit scaling so voxel and world coordinates differ.
    # (Fix: a redundant `affine = np.eye(4)` assignment that was
    # immediately overwritten has been removed.)
    affine = np.eye(4) * 1.2
    seed = (1.4, 1.4, 1.4)

    masker = NiftiSpheresMasker([seed], radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    spheres_data = masker.fit_transform(nibabel.Nifti1Image(data, affine))
    masker.inverse_transform(spheres_data)

    # Test if masking is taken into account: move the mask off the seed
    # voxel so the 0.1 mm sphere no longer contains any in-mask voxel.
    mask[1, 1, 1] = 0
    mask[1, 1, 0] = 1
    masker = NiftiSpheresMasker([seed], radius=0.1,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit(nibabel.Nifti1Image(data, affine))
    with pytest.raises(ValueError, match='These spheres are empty'):
        masker.inverse_transform(spheres_data)

    # A large enough radius reaches the shifted mask, so it must succeed.
    masker = NiftiSpheresMasker([seed], radius=1.6,
                                mask_img=nibabel.Nifti1Image(mask, affine))
    masker.fit(nibabel.Nifti1Image(data, affine))
    masker.inverse_transform(spheres_data)
def test_standardization():
    """Extracted signals must honor the `standardize` strategy."""
    rng = np.random.RandomState(42)
    data = rng.random_sample((3, 3, 3, 5))
    img = nibabel.Nifti1Image(data, np.eye(4))
    seed = [(1, 1, 1)]

    # z-score standardization: zero mean, unit standard deviation.
    zscored = NiftiSpheresMasker(seed, standardize='zscore').fit_transform(img)
    np.testing.assert_almost_equal(zscored.mean(), 0)
    np.testing.assert_almost_equal(zscored.std(), 1)

    # percent signal change: zero mean and matches the closed form
    # 100 * x / mean(x) - 100 computed on the seed voxel.
    psc = NiftiSpheresMasker(seed, standardize='psc').fit_transform(img)
    np.testing.assert_almost_equal(psc.mean(), 0)
    expected = data[1, 1, 1] / data[1, 1, 1].mean() * 100 - 100
    np.testing.assert_almost_equal(psc.ravel(), expected)
def test_is_nifti_spheres_masker_give_nans():
    """Sphere signals must stay finite even when the image contains NaNs."""
    affine = np.eye(4)
    # All-NaN 10x10x10 volume ...
    data_with_nans = np.full((10, 10, 10), np.nan, dtype=np.float32)
    # ... whose low 9x9x9 corner is overwritten with finite random values,
    # leaving a NaN border at the high end of each axis.
    data_without_nans = np.random.RandomState(42).random_sample((9, 9, 9))
    indices = np.nonzero(data_without_nans)
    data_with_nans[indices] = data_without_nans[indices]
    img = nibabel.Nifti1Image(data_with_nans, affine)

    # Seed near the NaN border: the sphere overlaps NaN voxels.
    seed = [(7, 7, 7)]
    masker = NiftiSpheresMasker(seeds=seed, radius=2.)
    assert not np.isnan(np.sum(masker.fit_transform(img)))

    # With a mask restricted to the finite region the seed stays inside
    # the brain, so the output must be NaN-free as well.
    mask_img = nibabel.Nifti1Image(np.ones((9, 9, 9)), affine)
    masker = NiftiSpheresMasker(seeds=seed, radius=2., mask_img=mask_img)
    assert not np.isnan(np.sum(masker.fit_transform(img)))
def report_flm_adhd_dmn():  # pragma: no cover
    """Generate and save a first-level GLM report for a PCC-seed DMN analysis."""
    t_r = 2.
    slice_time_ref = 0.
    n_scans = 176
    pcc_coords = (0, -53, 26)

    adhd_dataset = nilearn.datasets.fetch_adhd(n_subjects=1)

    # Cleaned mean signal in a 10 mm sphere around the PCC seed:
    # detrended, standardized, band-pass filtered (0.01-0.1 Hz).
    seed_masker = NiftiSpheresMasker(
        [pcc_coords], radius=10, detrend=True, standardize=True,
        low_pass=0.1, high_pass=0.01, t_r=2., memory='nilearn_cache',
        memory_level=1, verbose=0)
    seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0])

    # Design matrix with the seed time course added as a regressor.
    frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans)
    design_matrix = make_first_level_design_matrix(
        frametimes, hrf_model='spm', add_regs=seed_time_series,
        add_reg_names=["pcc_seed"])
    # Weight 1 on the first column only — presumably the seed regressor
    # given add_reg_names above; confirm column order if reused.
    dmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1))
    contrasts = {'seed_based_glm': dmn_contrast}

    first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref)
    first_level_model = first_level_model.fit(
        run_imgs=adhd_dataset.func[0], design_matrices=design_matrix)

    # NOTE(review): report_dims=(1200, 'a') has a non-numeric height —
    # presumably deliberate to exercise invalid-dims handling; confirm.
    report = make_glm_report(
        first_level_model,
        contrasts=contrasts,
        title='ADHD DMN Report',
        cluster_threshold=15,
        height_control='bonferroni',
        min_distance=8.,
        plot_type='glass',
        report_dims=(1200, 'a'),
    )
    output_filename = 'generated_report_flm_adhd_dmn.html'
    output_filepath = os.path.join(REPORTS_DIR, output_filename)
    report.save_as_html(output_filepath)
    report.get_iframe()
# Extract the largest clusters # ---------------------------- from nilearn.reporting import get_clusters_table from nilearn.maskers import NiftiSpheresMasker table = get_clusters_table(z_map, stat_threshold=3.1, cluster_threshold=20).set_index('Cluster ID', drop=True) table.head() # get the 6 largest clusters' max x, y, and z coordinates coords = table.loc[range(1, 7), ['X', 'Y', 'Z']].values # extract time series from each coordinate masker = NiftiSpheresMasker(coords) real_timeseries = masker.fit_transform(fmri_img) predicted_timeseries = masker.fit_transform(fmri_glm.predicted[0]) ######################################################################### # Plot predicted and actual time series for 6 most significant clusters # --------------------------------------------------------------------- import matplotlib.pyplot as plt # colors for each of the clusters colors = ['blue', 'navy', 'purple', 'magenta', 'olive', 'teal'] # plot the time series and corresponding locations fig1, axs1 = plt.subplots(2, 6) for i in range(0, 6): # plotting time series axs1[0, i].set_title('Cluster peak {}\n'.format(coords[i])) axs1[0, i].plot(real_timeseries[:, i], c=colors[i], lw=2)
detrend=True, standardize=True,
                             # band-pass 0.01-0.1 Hz at TR = 2 s
                             low_pass=0.1, high_pass=0.01, t_r=2,
                             memory='nilearn_cache', memory_level=1,
                             verbose=2)

# Additionally, we pass confound information to ensure our extracted
# signal is cleaned from confounds.

func_filename = dataset.func[0]
confounds_filename = dataset.confounds[0]

# Mean time series per sphere, with the dataset confounds regressed out.
time_series = masker.fit_transform(func_filename,
                                   confounds=[confounds_filename])

##########################################################################
# Display time series
# --------------------
import matplotlib.pyplot as plt

# One curve per extracted region; `labels` is defined earlier in the
# example — presumably one name per seed, in the same order (confirm).
for time_serie, label in zip(time_series.T, labels):
    plt.plot(time_serie, label=label)

plt.title('Default Mode Network Time Series')
plt.xlabel('Scan number')
plt.ylabel('Normalized signal')
plt.legend()
plt.tight_layout()
)

# Whole-brain masker: 6 mm smoothing, detrending and standardization,
# but no temporal filtering (low_pass/high_pass/t_r left as None).
brain_masker = NiftiMasker(
    smoothing_fwhm=6,
    detrend=True,
    standardize=True,
    low_pass=None,
    high_pass=None,
    t_r=None,
    memory='nilearn_cache',
    memory_level=1,
    verbose=0,
)

# Perform the seed-to-voxel correlation for the LSS 'language' beta series
lang_seed_beta_series = seed_masker.fit_transform(lss_beta_maps['language'])
lang_beta_series = brain_masker.fit_transform(lss_beta_maps['language'])
# Dot product of standardized series divided by the number of beta maps
# gives one correlation value per voxel against the seed.
lang_corrs = np.dot(
    lang_beta_series.T,
    lang_seed_beta_series,
) / lang_seed_beta_series.shape[0]
language_connectivity_img = brain_masker.inverse_transform(lang_corrs.T)

# Perform the seed-to-voxel correlation for the LSS 'string' beta series
# (same pipeline as above, applied to the 'string' condition).
string_seed_beta_series = seed_masker.fit_transform(lss_beta_maps['string'])
string_beta_series = brain_masker.fit_transform(lss_beta_maps['string'])
string_corrs = np.dot(
    string_beta_series.T,
    string_seed_beta_series,
) / string_seed_beta_series.shape[0]
string_connectivity_img = brain_masker.inverse_transform(string_corrs.T)
######################################################################### # Estimate contrasts # ------------------ # Specify the contrasts. seed_masker = NiftiSpheresMasker([pcc_coords], radius=10, detrend=True, standardize=True, low_pass=0.1, high_pass=0.01, t_r=2., memory='nilearn_cache', memory_level=1, verbose=0) seed_time_series = seed_masker.fit_transform(adhd_dataset.func[0]) frametimes = np.linspace(0, (n_scans - 1) * t_r, n_scans) design_matrix = make_first_level_design_matrix(frametimes, hrf_model='spm', add_regs=seed_time_series, add_reg_names=["pcc_seed"]) dmn_contrast = np.array([1] + [0] * (design_matrix.shape[1] - 1)) contrasts = {'seed_based_glm': dmn_contrast} ######################################################################### # Perform first level analysis # ---------------------------- # Setup and fit GLM. first_level_model = FirstLevelModel(t_r=t_r, slice_time_ref=slice_time_ref) first_level_model = first_level_model.fit(run_imgs=adhd_dataset.func[0], design_matrices=design_matrix)
seed_masker = NiftiSpheresMasker(pcc_coords, radius=8, detrend=True, standardize=True, low_pass=0.1, high_pass=0.01, t_r=2, memory='nilearn_cache', memory_level=1, verbose=0) ########################################################################## # Then we extract the mean time series within the seed region while # regressing out the confounds that # can be found in the dataset's csv file seed_time_series = seed_masker.fit_transform(func_filename, confounds=[confound_filename]) ########################################################################## # Next, we can proceed similarly for the **brain-wide voxel-wise time # series**, using :class:`nilearn.maskers.NiftiMasker` with the same input # arguments as in the seed_masker in addition to smoothing with a 6 mm kernel from nilearn.maskers import NiftiMasker brain_masker = NiftiMasker(smoothing_fwhm=6, detrend=True, standardize=True, low_pass=0.1, high_pass=0.01, t_r=2, memory='nilearn_cache', memory_level=1,