def gabor_contribution2prf(feat_dir, prf_dir, db_dir, subj_id, roi): """Calculate tunning contribution of each gabor sub-banks.""" # load fmri response vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim1_fmri(db_dir, subj_id, roi=roi) del train_fmri_ts print 'Voxel number: %s' % (len(vxl_idx)) # load candidate models val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'), mmap_mode='r') # load selected model parameters roi_dir = os.path.join(prf_dir, roi) paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy')) sel_model = np.load(os.path.join(roi_dir, 'reg_sel_model.npy')) gabor_corr = np.zeros((paras.shape[0], 9)) for i in range(paras.shape[0]): print 'Voxel %s' % (i) # load features feats = np.array(val_models[int(sel_model[i]), ...]).astype(np.float64) feats = zscore(feats.T).T for j in range(9): pred = np.dot(feats[:, (j * 8):(j * 8 + 8)], paras[i, (j * 8):(j * 8 + 8)]) gabor_corr[i, j] = np.corrcoef(pred, val_fmri_ts[i])[0, 1] np.save(os.path.join(roi_dir, 'gabor_contributes.npy'), gabor_corr)
def null_distribution_prf_tunning(feat_dir, prf_dir, db_dir, subj_id, roi): """Generate Null distribution of pRF model tunning using validation data.""" # load fmri response vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim1_fmri(db_dir, subj_id, roi=roi) del train_fmri_ts print 'Voxel number: %s' % (len(vxl_idx)) # load candidate models val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'), mmap_mode='r') # output directory config roi_dir = os.path.join(prf_dir, roi) # load selected model parameters paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy')) sel_model = np.load(os.path.join(roi_dir, 'reg_sel_model.npy')) null_corr = np.zeros((paras.shape[0], 1000)) for i in range(paras.shape[0]): print 'Voxel %s' % (i) # load features feats = np.array(val_models[int(sel_model[i]), ...]).astype(np.float64) feats = zscore(feats.T).T pred = np.dot(feats, paras[i]) for j in range(1000): shuffled_val_ts = np.random.permutation(val_fmri_ts[i]) null_corr[i, j] = np.corrcoef(pred, shuffled_val_ts)[0, 1] np.save(os.path.join(roi_dir, 'random_corr.npy'), null_corr)
def prf_selection(feat_dir, prf_dir, db_dir, subj_id, roi): """Select best model for each voxel and validating.""" # load fmri response vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim1_fmri(db_dir, subj_id, roi=roi) del train_fmri_ts print 'Voxel number: %s' % (len(vxl_idx)) # load candidate models val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'), mmap_mode='r') # output directory config roi_dir = os.path.join(prf_dir, roi) # load candidate model parameters paras = np.load(os.path.join(roi_dir, 'reg_paras.npy')) mcorr = np.load(os.path.join(roi_dir, 'reg_model_corr.npy')) alphas = np.load(os.path.join(roi_dir, 'reg_alphas.npy')) sel_paras = np.zeros((mcorr.shape[1], 72)) sel_model = np.zeros(mcorr.shape[1]) sel_model_corr = np.zeros(mcorr.shape[1]) for i in range(mcorr.shape[1]): maxi = np.argmax(np.nan_to_num(mcorr[:, i])) print 'Voxel %s - Max corr %s - Model %s' % (i, mcorr[maxi, i], maxi) print 'Alpha : %s' % (alphas[maxi, i]) sel_paras[i] = paras[maxi, i] sel_model[i] = maxi feats = np.array(val_models[maxi, ...]).astype(np.float64) feats = zscore(feats.T).T pred = np.dot(feats, sel_paras[i]) sel_model_corr[i] = np.corrcoef(pred, val_fmri_ts[i])[0, 1] print 'Val Corr : %s' % (sel_model_corr[i]) np.save(os.path.join(roi_dir, 'reg_sel_paras.npy'), sel_paras) np.save(os.path.join(roi_dir, 'reg_sel_model.npy'), sel_model) np.save(os.path.join(roi_dir, 'reg_sel_model_corr.npy'), sel_model_corr)
def ridge_fitting(feat_dir, prf_dir, db_dir, subj_id, roi):
    """pRF model fitting using ridge regression.

    90% trainning data used for model tuning, and another 10% data used for
    model seletion.  For each of the 42500 candidate models, bootstrap ridge
    regression is fit for all voxels at once; per-model weights, selection
    correlations and chosen alphas are written to memmapped files in
    `prf_dir/roi` and finally re-saved in standard .npy format.
    """
    # load fmri response; validation runs are not used during fitting
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim1_fmri(db_dir, subj_id,
                                                                roi=roi)
    del val_fmri_ts
    print 'Voxel number: %s' % (len(vxl_idx))
    # load candidate models (memory-mapped: the file is large)
    train_models = np.load(os.path.join(feat_dir, 'train_candidate_model.npy'),
                           mmap_mode='r')
    # output directory config
    roi_dir = os.path.join(prf_dir, roi)
    check_path(roi_dir)
    # model seletion and tuning
    ALPHA_NUM = 20
    BOOTS_NUM = 15
    # results are accumulated in disk-backed memmaps so partial progress
    # survives the long loop; NOTE(review): the memmaps write raw binary to
    # these .npy paths — the np.save calls at the end rewrite them with a
    # proper .npy header
    paras_file = os.path.join(roi_dir, 'reg_paras.npy')
    paras = np.memmap(paras_file, dtype='float64', mode='w+',
                      shape=(42500, len(vxl_idx), 72))
    mcorr_file = os.path.join(roi_dir, 'reg_model_corr.npy')
    mcorr = np.memmap(mcorr_file, dtype='float64', mode='w+',
                      shape=(42500, len(vxl_idx)))
    alphas_file = os.path.join(roi_dir, 'reg_alphas.npy')
    alphas = np.memmap(alphas_file, dtype='float64', mode='w+',
                       shape=(42500, len(vxl_idx)))
    # fMRI data z-score (per voxel, across time; epsilon guards zero std)
    print 'fmri data temporal z-score'
    m = np.mean(train_fmri_ts, axis=1, keepdims=True)
    s = np.std(train_fmri_ts, axis=1, keepdims=True)
    train_fmri_ts = (train_fmri_ts - m) / (1e-10 + s)
    # split training dataset into model tunning set and model selection set
    # (first 90% of the 1750 training time points vs the remaining 10%)
    tune_fmri_ts = train_fmri_ts[:, :int(1750 * 0.9)]
    sel_fmri_ts = train_fmri_ts[:, int(1750 * 0.9):]
    # model testing
    for i in range(42500):
        print 'Model %s' % (i)
        # remove models which centered outside the 20 degree of visual angle:
        # model index encodes a position on a 50x50 grid (10-pixel spacing on
        # a 500-pixel canvas); centers farther than 249 px from the canvas
        # center are skipped and marked NaN
        # NOTE(review): Python 2 integer division is relied on here
        xi = (i % 2500) / 50
        yi = (i % 2500) % 50
        x0 = np.arange(5, 500, 10)[xi]
        y0 = np.arange(5, 500, 10)[yi]
        d = np.sqrt(np.square(x0 - 250) + np.square(y0 - 250))
        if d > 249:
            print 'Model center outside the visual angle'
            paras[i, ...] = np.NaN
            mcorr[i] = np.NaN
            alphas[i] = np.NaN
            continue
        # candidate model feature matrix, z-scored
        train_x = np.array(train_models[i, ...]).astype(np.float64)
        train_x = zscore(train_x.T).T
        # split training dataset into model tunning and selection sets
        tune_x = train_x[:int(1750 * 0.9), :]
        sel_x = train_x[int(1750 * 0.9):, :]
        # bootstrap ridge over a log-spaced alpha grid; one alpha per voxel
        wt, r, alpha, bscores, valinds = ridge.bootstrap_ridge(
            tune_x, tune_fmri_ts.T, sel_x, sel_fmri_ts.T,
            alphas=np.logspace(-2, 3, ALPHA_NUM), nboots=BOOTS_NUM,
            chunklen=175, nchunks=1, single_alpha=False, use_corr=False)
        paras[i, ...] = wt.T
        mcorr[i] = r
        alphas[i] = alpha
    # save output: materialize the memmaps and rewrite as regular .npy files
    paras = np.array(paras)
    np.save(paras_file, paras)
    mcorr = np.array(mcorr)
    np.save(mcorr_file, mcorr)
    alphas = np.array(alphas)
    np.save(alphas_file, alphas)
def pls_ridge_fitting(feat_dir, prf_dir, db_dir, subj_id, roi):
    """pRF model fitting using ridge regression.

    90% trainning data used for model tuning, and another 10% data used for
    model seletion.  Variant of `ridge_fitting` that fits the 15360 candidate
    models to PLS residual fMRI time series (7200 time points) instead of the
    raw responses; results go to `prf_dir/roi/pls_residual`.
    """
    # load fmri response: PLS residual time series saved by an earlier stage
    fmri_data = np.load(os.path.join(prf_dir, roi, 'pls_residual_fmri.npz'))
    vxl_idx = fmri_data['vxl_idx']
    train_fmri_ts = fmri_data['pls_train_residual']
    print 'Voxel number: %s' % (len(vxl_idx))
    # load candidate models (memory-mapped: the file is large)
    train_models = np.load(os.path.join(feat_dir, 'train_candidate_model.npy'),
                           mmap_mode='r')
    # output directory config
    roi_dir = os.path.join(prf_dir, roi, 'pls_residual')
    check_path(roi_dir)
    # model seletion and tuning
    ALPHA_NUM = 20
    BOOTS_NUM = 15
    # disk-backed memmaps accumulate results; NOTE(review): raw binary is
    # written to these .npy paths until the final np.save adds the header
    paras_file = os.path.join(roi_dir, 'reg_paras.npy')
    paras = np.memmap(paras_file, dtype='float64', mode='w+',
                      shape=(15360, len(vxl_idx), 46))
    mcorr_file = os.path.join(roi_dir, 'reg_model_corr.npy')
    mcorr = np.memmap(mcorr_file, dtype='float64', mode='w+',
                      shape=(15360, len(vxl_idx)))
    alphas_file = os.path.join(roi_dir, 'reg_alphas.npy')
    alphas = np.memmap(alphas_file, dtype='float64', mode='w+',
                       shape=(15360, len(vxl_idx)))
    # fMRI data z-score (per voxel, across time; epsilon guards zero std)
    print 'fmri data temporal z-score'
    m = np.mean(train_fmri_ts, axis=1, keepdims=True)
    s = np.std(train_fmri_ts, axis=1, keepdims=True)
    train_fmri_ts = (train_fmri_ts - m) / (1e-10 + s)
    # split training dataset into model tunning set and model selection set
    # (first 90% of the 7200 time points vs the remaining 10%)
    tune_fmri_ts = train_fmri_ts[:, :int(7200 * 0.9)]
    sel_fmri_ts = train_fmri_ts[:, int(7200 * 0.9):]
    # model testing (no visual-angle filter here, unlike `ridge_fitting`)
    for i in range(15360):
        print 'Model %s' % (i)
        train_x = np.array(train_models[i, ...]).astype(np.float64)
        # NOTE(review): `zscore(train_x).T` differs from `ridge_fitting`'s
        # `zscore(train_x.T).T` — presumably the on-disk feature layout
        # differs between the two candidate-model files; confirm axes
        train_x = zscore(train_x).T
        # split training dataset into model tunning and selection sets
        tune_x = train_x[:int(7200 * 0.9), :]
        sel_x = train_x[int(7200 * 0.9):, :]
        # bootstrap ridge over a log-spaced alpha grid; one alpha per voxel
        wt, r, alpha, bscores, valinds = ridge.bootstrap_ridge(
            tune_x, tune_fmri_ts.T, sel_x, sel_fmri_ts.T,
            alphas=np.logspace(-2, 3, ALPHA_NUM), nboots=BOOTS_NUM,
            chunklen=720, nchunks=1, single_alpha=False, use_corr=False)
        paras[i, ...] = wt.T
        mcorr[i] = r
        alphas[i] = alpha
    # save output: materialize the memmaps and rewrite as regular .npy files
    paras = np.array(paras)
    np.save(paras_file, paras)
    mcorr = np.array(mcorr)
    np.save(mcorr_file, mcorr)
    alphas = np.array(alphas)
    np.save(alphas_file, alphas)
print 'Kernel %s' % (i + 1) # load CNN features modulated by Gaussian kernels if i / 550 > file_idx: train_feat_file = os.path.join( feat_dir, 'gaussian_kernels', 'gaussian_conv1_train_trs_%s.npy' % (i / 550)) train_feat_ts = np.load(train_feat_file) val_feat_file = os.path.join( feat_dir, 'gaussian_kernels', 'gaussian_conv1_val_trs_%s.npy' % (i / 550)) val_feat_ts = np.load(val_feat_file) file_idx = i / 550 train_x = train_feat_ts[..., i % 550] val_x = val_feat_ts[..., i % 550] # shape of x : (96, 7200/540) train_x = zscore(train_x).T val_x = zscore(val_x).T # output vars paras = np.zeros((96, 30250, len(vxl_idx))) val_corr = np.zeros((30250, len(vxl_idx))) alphas = np.zeros((30250, len(vxl_idx))) for j in range(len(vxl_idx)): print 'Voxel %s' % (j + 1) train_y = train_fmri_ts[j] val_y = val_fmri_ts[j] lasso_cv = LassoCV(cv=10, n_jobs=4) lasso_cv.fit(train_x, train_y) alphas[i, j] = lasso_cv.alpha_ paras[:, i, j] = lasso_cv.coef_ pred_y = lasso_cv.predict(val_x) val_corr[i, j] = np.corrcoef(val_y, pred_y)[0][1]