import glob
import os
import pickle

from nilearn.input_data import NiftiMasker, MultiNiftiMasker
from nilearn.image import math_img, mean_img
from nilearn.plotting import plot_glass_brain

# `paths` is a project-local module exposing path2Data and path2Figures.


def glass_brain(r2_voxels, subject, current_ROI, ROI_name, name):
    """
    Input: masked results of the R^2 score.
    Takes the masked data and projects it back into 3D space.
    Output: 3D glass brain of the R^2 score.
    """
    # Get one mask and fit it to the corresponding ROI
    if current_ROI != -1 and current_ROI <= 5:
        masks_ROIs_filenames = sorted(glob.glob(
            os.path.join(paths.path2Data, "en/ROIs_masks/", "*.nii")))
        ROI_mask = masks_ROIs_filenames[current_ROI]
        ROI_mask = NiftiMasker(ROI_mask, detrend=True, standardize=True)
        ROI_mask.fit()
        unmasked_data = ROI_mask.inverse_transform(r2_voxels)

    # Get masks and fit a global mask
    else:
        masks = []
        masks_filenames = sorted(glob.glob(
            os.path.join(paths.path2Data, "en/fmri_data/masks",
                         "sub_{}".format(subject), "resample*.pkl")))
        for file in masks_filenames:
            with open(file, 'rb') as f:
                mask = pickle.load(f)
                masks.append(mask)

        global_mask = math_img('img>0.5', img=mean_img(masks))
        masker = MultiNiftiMasker(global_mask, detrend=True, standardize=True)
        masker.fit()
        unmasked_data = masker.inverse_transform(r2_voxels)

    display = plot_glass_brain(unmasked_data, display_mode='lzry',
                               threshold='auto', colorbar=True,
                               title='Sub_{}'.format(subject))

    output_dir = os.path.join(paths.path2Figures, 'glass_brain',
                              'Sub_{}'.format(subject), ROI_name)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    figure_path = os.path.join(output_dir,
                               'R_squared_test_{}.png'.format(name))
    display.savefig(figure_path)
    print('Figure Path : ', figure_path)
    display.close()
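# Hypothetical call of the function above (the subject number, ROI index and
# names are illustrative, not taken from the original pipeline); r2_test is
# assumed to be the per-voxel R^2 array produced with the same masker used
# during model fitting.
glass_brain(r2_test, subject=57, current_ROI=-1, ROI_name='all_voxels',
            name='baseline_model')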
import numpy as np
import nibabel

from nilearn.input_data import MultiNiftiMasker

# _make_components and _make_data_from_components are module-local test
# helpers defined elsewhere in the same test module.


def _make_test_data(n_subjects=8, noisy=False):
    rng = np.random.RandomState(0)
    shape = (20, 20, 1)
    components = _make_components(shape)
    if noisy:
        # Creating noisy non-positive data
        components[rng.randn(*components.shape) > .8] *= -5.
        for component in components:
            assert component.max() <= -component.min()  # Goal met?

    # Create a "multi-subject" dataset
    data = _make_data_from_components(components, n_subjects=n_subjects)
    affine = np.eye(4)
    mask_img = nibabel.Nifti1Image(np.ones(shape, dtype=np.int8), affine)
    masker = MultiNiftiMasker(mask_img).fit()
    init = components + 1 * rng.randn(*components.shape)
    components = masker.inverse_transform(components)
    init = masker.inverse_transform(init)
    data = masker.inverse_transform(data)
    return data, mask_img, components, init
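# The snippets above and below all rely on the same round trip: a fitted
# (Multi)NiftiMasker turns images into a 2D (samples x in-mask voxels) array,
# and inverse_transform() projects such an array back into image space.
# A minimal, self-contained sketch of that pattern with synthetic data (all
# names and values below are illustrative):
import numpy as np
import nibabel

from nilearn.maskers import MultiNiftiMasker  # nilearn.input_data in older releases

shape = (20, 20, 1)
mask_img = nibabel.Nifti1Image(np.ones(shape, dtype=np.int8), np.eye(4))
masker = MultiNiftiMasker(mask_img=mask_img).fit()

scores = np.random.RandomState(0).rand(3, int(np.prod(shape)))  # 3 flat maps
score_imgs = masker.inverse_transform(scores)  # back to a 4D image
print(score_imgs.shape)  # (20, 20, 1, 3): one 3D volume per row of `scores`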
]
components = masker.transform(components_imgs)
proj, proj_inv, rec = make_projection_matrix(components, scale_bases=True)
dump(rec, join(get_output_dir(), 'benchmark', 'rec.pkl'))


def load_rec():
    return load(join(get_output_dir(), 'benchmark', 'rec.pkl'))


# compute_rec()

exp_dirs = join(get_output_dir(), 'single_exp', '8')
models = []
rec = load_rec()
mask_img = fetch_mask()
masker = MultiNiftiMasker(mask_img=mask_img).fit()

for exp_dir in [exp_dirs]:
    estimator = load(join(exp_dirs, 'estimator.pkl'))
    transformer = load(join(exp_dirs, 'transformer.pkl'))
    print([(dataset, this_class)
           for dataset, lbin in transformer.lbins_.items()
           for this_class in lbin.classes_])
    coef = estimator.coef_
    coef_rec = coef.dot(rec)
    print(join(exp_dirs, 'maps.nii.gz'))
    imgs = masker.inverse_transform(coef_rec)
    imgs.to_filename(join(exp_dirs, 'maps.nii.gz'))
    plot_stat_map(index_img(imgs, 10))
    plt.show()
        data.nips2017_components256
    ]
else:
    components_imgs = [
        data.positive_components16,
        data.positive_components64,
        data.positive_components512
    ]
components = masker.transform(components_imgs)
proj, inv_proj, rec = memory.cache(make_projection_matrix)(
    components, scale_bases=True)

dict_learning = DictFact(code_alpha=2, comp_pos=True, n_components=20)
U, S, VT = svd(maps, full_matrices=False)
dict_learning.fit(maps.T)
comp = dict_learning.components_.dot(proj.T)
comp_img = masker.inverse_transform(comp)
comp_img.to_filename(join(analysis_dir, 'comp.nii.gz'))

maps = maps.T.dot(proj.T)
maps_img = masker.inverse_transform(maps)
transformer = load(join(artifact_dir, 'transformer.pkl'))
classes = np.concatenate([lbin.classes_ for lbin in transformer.lbins_])
maps_img.to_filename(join(analysis_dir, 'maps.nii.gz'))

Parallel(n_jobs=3, verbose=10)(
    delayed(plot_map)(index_img(maps_img, i), classes[i], i)
    for i in range(maps_img.shape[3]))
Parallel(n_jobs=3, verbose=10)(
    delayed(plot_map)(index_img(comp_img, i), 'map_%i' % i, i)
    # closing of this truncated call inferred by symmetry with the one above
    for i in range(comp_img.shape[3]))
    mask = (target_img.get_data()[..., 0] != 0).astype(int)
    mask_img = nibabel.Nifti1Image(mask, target_img.affine)
    nibabel.save(mask_img, MASKFILE)
else:
    mask_img = nibabel.load(MASKFILE)

# Mask and preproc EPI data
masker = MultiNiftiMasker(mask_img=mask_img, standardize=True)
masker.fit()
if not os.path.isfile(DATAFILE):
    y = np.concatenate(masker.transform(func_filenames), axis=0)
    print(y.shape)
    np.save(DATAFILE, y)
else:
    y = np.load(DATAFILE)
iterator = masker.inverse_transform(y).get_fdata()
iterator = iterator.transpose((3, 0, 1, 2))
iterator = np.expand_dims(iterator, axis=1)
print(iterator.shape)

# Data iterator
manager = DataManager.from_numpy(train_inputs=iterator, batch_size=BATCH_SIZE,
                                 add_input=True)

# Create model
name = "ResAENet"
model_weights = os.path.join(WORKDIR, "checkpoint_" + name,
                             "model_0_epoch_{0}.pth".format(EPOCH))
if os.path.isfile(model_weights):
    pretrained = model_weights
    # we compute how much variance our encoding model explains in each voxel
    scores.append(r2_score(fmri_data[test], predictions,
                           multioutput='raw_values'))

##############################################################################
# Mapping the encoding scores on the brain
# ----------------------------------------
#
# To plot the scores onto the brain, we create a Nifti1Image containing
# the scores and then threshold it:

from nilearn.image import threshold_img

cut_score = np.mean(scores, axis=0)
cut_score[cut_score < 0] = 0

# bring the scores into the shape of the background brain
score_map_img = masker.inverse_transform(cut_score)

thresholded_score_map_img = threshold_img(score_map_img, threshold=1e-6)

##############################################################################
# Plotting the statistical map on a background brain, we mark four voxels
# which we will inspect more closely later on.

from nilearn.plotting import plot_stat_map
from nilearn.image import coord_transform


def index_to_xy_coord(x, y, z=10):
    '''Transforms data index to coordinates of the background + offset'''
    coords = coord_transform(x, y, z,
                             affine=thresholded_score_map_img.affine)
    return np.array(coords)[np.newaxis, :] + np.array([0, 1, 0])
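# Hypothetical continuation of the example above: mark a few voxels of
# interest on the thresholded score map. The (x, y) indices below are
# placeholders, not the ones inspected in the original example.
display = plot_stat_map(thresholded_score_map_img, display_mode='z',
                        cut_coords=1, title='Explained variance per voxel')
for x, y in [(30, 10), (31, 10), (32, 10), (31, 9)]:  # illustrative indices
    display.add_markers(index_to_xy_coord(x, y),
                        marker_color='g', marker_size=50)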
###############################################################################
# 2 - Testing
###############################################################################
idx = {}
for g in ['AD', 'LMCI', 'EMCI', 'Normal']:
    idx[g] = np.where(dx_group == g)

groups = [['AD', 'Normal'], ['AD', 'EMCI'], ['AD', 'LMCI'],
          ['EMCI', 'LMCI'], ['EMCI', 'Normal'], ['LMCI', 'Normal']]

for gr in groups:
    test_var = np.ones((len(func_files), 1), dtype=float)  # intercept
    neg_log_pvals_rpbi, _, _ = randomized_parcellation_based_inference(
        test_var, func_masked,  # + intercept as a covariate by default
        np.asarray(masker.mask_img_.get_data()).astype(bool),
        n_parcellations=100,  # 100 is the recommended value (use 30 to save time)
        n_parcels=500,
        threshold=0,
        n_perm=10000,  # 10,000 is the recommended value (use 1,000 to save time)
        memory=CACHE_DIR,
        n_jobs=20,
        verbose=False)
    neg_log_pvals_rpbi_unmasked = masker.inverse_transform(
        np.ravel(neg_log_pvals_rpbi))
    p_path = os.path.join(CACHE_DIR, 'figures',
                          'pmap_rpbi_' + gr[0] + '_' + gr[1] +
                          '_baseline_fmri_adni')
    neg_log_pvals_rpbi_unmasked.to_filename(p_path + '.nii.gz')
    break
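# Hypothetical quick inspection of the saved map; -log10(0.05) ~ 1.3
# corresponds to p < 0.05 on the -log10 scale used by the map, and is chosen
# here only for illustration.
from nilearn.plotting import plot_stat_map

plot_stat_map(neg_log_pvals_rpbi_unmasked, threshold=-np.log10(0.05),
              title='RPBI: {} vs {}'.format(gr[0], gr[1]))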
masker = MultiNiftiMasker(smoothing_fwhm=6., memory="nilearn_cache",
                          memory_level=1)
masker.fit(dataset.func)
affine = masker.mask_img_.get_affine()

if not os.path.exists('canica.nii.gz'):
    t0 = time.time()
    canica = CanICA(n_components=n_components, mask=masker,
                    smoothing_fwhm=6., memory="nilearn_cache",
                    memory_level=1, threshold=None, random_state=1)
    canica.fit(dataset.func)
    print('Canica: %f' % (time.time() - t0))
    canica_components = masker.inverse_transform(canica.components_)
    nibabel.save(canica_components, 'canica.nii.gz')

if not os.path.exists('canica.pdf'):
    cc = nibabel.load('canica.nii.gz')
    c = cc.get_data()[:, :, :, 16]
    vmax = np.max(np.abs(c[:, :, 37]))
    c = np.ma.masked_array(
        c, np.logical_not(masker.mask_img_.get_data().astype(bool)))
    pl.imshow(np.rot90(c[:, :, 37]), interpolation='nearest',
              vmax=vmax, vmin=-vmax, cmap='jet')
    pl.savefig('canica.pdf')
    pl.savefig('canica.eps')

smooth_masked = masker.transform(dataset.func)
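# A hedged alternative to the manual pylab slicing above: inspect the same
# saved component with nilearn's own plotting helpers (the component index
# and display options are illustrative).
from nilearn.image import index_img
from nilearn.plotting import plot_stat_map

plot_stat_map(index_img('canica.nii.gz', 16), display_mode='z',
              colorbar=True, title='CanICA component 16')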
        'subject', 'session',
    ]),
    data=filenames, columns=['filename'])
records.sort_index(inplace=True)

masker = MultiNiftiMasker(mask_img=mask_gm, n_jobs=1).fit()
r2s = masker.transform(records['filename'].values)
r2s = np.concatenate(r2s, axis=0)
r2s[r2s < 0] = 0
r2s = pd.DataFrame(data=r2s, index=records.index)
r2s.sort_index(inplace=True)

ref = r2s.loc['none', 'none'].values.ravel()[:, None]

imgs = [masker.inverse_transform(r2) for _, r2 in r2s.iterrows()]
records['nifti'] = imgs

subject, session = 4, 3
imgs_to_plot = records['nifti'].loc[pd.IndexSlice[:, :, subject, session]]
cut_coords = -55, -38, 2
vmax = r2s.loc[pd.IndexSlice[:, :, subject, session], :].values.max()

fig, axes = plt.subplots(len(imgs_to_plot), 2, figsize=(12, 28),
                         gridspec_kw={'width_ratios': [1, 2]})
voxels = check_random_state(0).permutation(ref.ravel().shape[0])[:1000]

for i, (index, img) in enumerate(imgs_to_plot.items()):
    r2 = r2s.loc[index].values.ravel()[:, None]