# Keep only data corresponding to faces or houses
condition_mask = np.logical_or(conditions == 'face', conditions == 'house')
X = fmri_data[..., condition_mask]
y = y[condition_mask]
session = session[condition_mask]
conditions = conditions[condition_mask]

# We have 2 conditions
n_conditions = np.size(np.unique(y))

### Loading step ##############################################################
from nisl.io import NiftiMasker
from nibabel import Nifti1Image

# Mask the data, converting the 4D volume into a 2D (samples x features) array
nifti_masker = NiftiMasker(mask=mask, sessions=session, smooth=4)
niimg = Nifti1Image(X, affine)
X = nifti_masker.fit_transform(niimg)

### Prediction function #######################################################
### Define the prediction function to be used.
# Here we use a Support Vector Classification, with a linear kernel and C=1
from sklearn.svm import SVC
clf = SVC(kernel='linear', C=1.)

### Dimension reduction #######################################################
from sklearn.feature_selection import SelectKBest, f_classif

### Define the dimension reduction to be used.
# Here we use a classical univariate feature selection based on F-test,
# namely Anova. We set the number of features to be selected to 1000.
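# A minimal sketch of how the Anova feature selection described above could be
# chained with the SVC using a scikit-learn Pipeline. The names
# `feature_selection` and `anova_svc` are illustrative, not from the original
# script.
from sklearn.pipeline import Pipeline

feature_selection = SelectKBest(f_classif, k=1000)
anova_svc = Pipeline([('anova', feature_selection), ('svc', clf)])

# Fit the full pipeline on the masked data and predict the conditions
anova_svc.fit(X, y)
y_pred = anova_svc.predict(X)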
# Keep only data corresponding to faces or houses
condition_mask = np.logical_or(conditions == 'face', conditions == 'house')
X = fmri_data[..., condition_mask]
y = y[condition_mask]
session = session[condition_mask]
conditions = conditions[condition_mask]

# We have 2 conditions
n_conditions = np.size(np.unique(y))

### Loading step ##############################################################
from nisl.io import NiftiMasker
from nibabel import Nifti1Image

# Mask the data, detrending it to remove slow signal drifts
nifti_masker = NiftiMasker(mask=mask, detrend=True, sessions=session)
niimg = Nifti1Image(X, affine)
X = nifti_masker.fit_transform(niimg)

### Prediction function #######################################################
### Define the prediction function to be used.
# Here we use a Support Vector Classification, with a linear kernel and C=1
from sklearn.svm import SVC
clf = SVC(kernel='linear', C=1.)

### Dimension reduction #######################################################
from sklearn.feature_selection import SelectKBest, f_classif

### Define the dimension reduction to be used.
# Here we use a classical univariate feature selection based on F-test,
# namely Anova. We set the number of features to be selected to 1000.
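# A minimal sketch of how such an Anova + SVC pipeline could be evaluated with
# session-wise cross-validation (leave one session out). It follows the
# scikit-learn 0.1x API used elsewhere in these examples and is an
# illustration, not the original script; `anova_svc` and `cv_scores` are
# illustrative names.
from sklearn.pipeline import Pipeline
from sklearn.cross_validation import LeaveOneLabelOut, cross_val_score

anova_svc = Pipeline([('anova', SelectKBest(f_classif, k=1000)), ('svc', clf)])

# Each fold holds out one acquisition session
cv = LeaveOneLabelOut(session)
cv_scores = cross_val_score(anova_svc, X, y, cv=cv)
print("Mean cross-validated accuracy: %f" % cv_scores.mean())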
images by mean on the parcellation. This parcellation may be useful in a
supervised learning setting; see for instance: `A supervised clustering
approach for fMRI-based inference of brain states
<http://hal.inria.fr/inria-00589201>`_, Michel et al.,
Pattern Recognition 2011.
"""

### Load nyu_rest dataset #####################################################
from nisl import datasets
from nisl.io import NiftiMasker

dataset = datasets.fetch_nyu_rest(n_subjects=1)

# Compute a brain mask and extract the in-mask time series as a 2D array
nifti_masker = NiftiMasker()
fmri_masked = nifti_masker.fit_transform(dataset.func[0])
mask = nifti_masker.mask_.get_data()

### Ward ######################################################################
# Compute connectivity matrix: which voxel is connected to which
from sklearn.feature_extraction import image
shape = mask.shape
connectivity = image.grid_to_graph(n_x=shape[0], n_y=shape[1], n_z=shape[2],
                                   mask=mask)

# Computing the ward for the first time, this is long...
from sklearn.cluster import WardAgglomeration
import time
start = time.time()
ward = WardAgglomeration(n_clusters=500, connectivity=connectivity,
# With scikit-learn >= 0.14, replace this line by: target = labels
_, target = sklearn.utils.fixes.unique(labels, return_inverse=True)

### Remove resting state condition ############################################
no_rest_indices = (labels != 'rest')
target = target[no_rest_indices]

### Load the mask #############################################################
from nisl.io import NiftiMasker

nifti_masker = NiftiMasker(mask=dataset.mask_vt[0])
# We give the nifti_masker a filename and retrieve a 2D array ready
# for machine learning with scikit-learn
fmri_masked = nifti_masker.fit_transform(dataset.func[0])

### Prediction function #######################################################
# First, we remove the rest condition
fmri_masked = fmri_masked[no_rest_indices]

# Here we use a Support Vector Classification, with a linear kernel and C=1
from sklearn.svm import SVC
svc = SVC(kernel='linear', C=1.)

# And we run it
svc.fit(fmri_masked, target)
y_pred = svc.predict(fmri_masked)

### Unmasking #################################################################
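# A minimal sketch of what the unmasking step could look like: the SVC weights
# live in the masked 2D feature space, and the masker's inverse_transform puts
# them back into brain space as an image. The names `coef_niimg` and
# `coef_map` are illustrative, not from the original script.
coef = svc.coef_
coef_niimg = nifti_masker.inverse_transform(coef)
coef_map = coef_niimg.get_data()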
cv = KFold(y.size, k=4)

import nisl.decoding
# The radius is that of the Searchlight sphere that will scan the volume
searchlight = nisl.decoding.SearchLight(mask_img,
                                        process_mask_img=process_mask_img,
                                        radius=5.6, n_jobs=n_jobs,
                                        score_func=score_func, verbose=1,
                                        cv=cv)
searchlight.fit(fmri_img, y)

### F-scores computation ######################################################
from nisl.io import NiftiMasker

nifti_masker = NiftiMasker(mask=mask_img, sessions=session,
                           memory='nisl_cache', memory_level=1)
fmri_masked = nifti_masker.fit_transform(fmri_img)

from sklearn.feature_selection import f_classif
f_values, p_values = f_classif(fmri_masked, y)
p_values = -np.log10(p_values)
p_values[np.isnan(p_values)] = 0
p_values[p_values > 10] = 10
p_unmasked = nifti_masker.inverse_transform(p_values).get_data()

### Visualization #############################################################
import pylab as pl

# Use the fMRI mean image as a surrogate of anatomical data
mean_fmri = fmri_img.get_data().mean(axis=-1)

# Searchlight results
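# A minimal sketch of how the searchlight results could be overlaid on the
# mean fMRI image with pylab, assuming the fitted SearchLight exposes its
# per-voxel scores as `searchlight.scores_`; the slice index 37 is an
# arbitrary choice for illustration only.
pl.figure()
pl.imshow(np.rot90(mean_fmri[..., 37]), interpolation='nearest',
          cmap=pl.cm.gray)
# Mask out voxels with a near-zero score so the anatomy stays visible
sl_scores = np.ma.masked_less(np.rot90(searchlight.scores_[..., 37]), 1e-6)
pl.imshow(sl_scores, interpolation='nearest', cmap=pl.cm.hot)
pl.title('Searchlight scores')
pl.axis('off')
pl.show()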