def get_hue_selectivity(prf_dir, db_dir, subj_id, roi):
    """Get the hue tuning curve of each voxel and calculate hue selectivity.

    Loads the selected pRF parameters of `roi`, derives a 201-point hue
    tuning curve per voxel from the hue-related weights (via `para2hue`),
    and quantifies selectivity as the peak-to-trough range of that curve.
    Curves of well-predicted voxels (validation corr >= 0.25) are also saved
    as figures.  Outputs 'hue_tunes.npy' and 'hue_selectivity.npy' in the
    ROI directory.
    """
    # load fmri response -- only the voxel indices are needed here
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    del val_fmri_ts
    # single-argument print() behaves identically under Python 2 and 3
    print('Voxel number: %s' % (len(vxl_idx)))
    # pRF estimate
    roi_dir = os.path.join(prf_dir, roi)
    sel_paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy'))
    sel_model_corr = np.load(os.path.join(roi_dir, 'reg_sel_model_corr.npy'))
    hue_tunes = np.zeros((len(vxl_idx), 201))
    hue_sel = np.zeros(len(vxl_idx))
    for i in range(len(vxl_idx)):
        print('Voxel %s, Val Corr %s' % (i, sel_model_corr[i]))
        paras = sel_paras[i]
        # hue-related weights start at index 40 of the parameter vector
        hue_tunes[i] = para2hue(paras[40:])
        # selectivity: peak-to-trough amplitude of the tuning curve
        hue_sel[i] = abs(hue_tunes[i].max() - hue_tunes[i].min())
        if sel_model_corr[i] >= 0.25:
            hue_file = os.path.join(
                roi_dir, 'Voxel_%s_%s_hue.png' % (i + 1, vxl_idx[i]))
            vutil.save_hue(hue_tunes[i], hue_file)
    np.save(os.path.join(roi_dir, 'hue_tunes.npy'), hue_tunes)
    np.save(os.path.join(roi_dir, 'hue_selectivity.npy'), hue_sel)
def get_prediction_residual(prf_dir, db_dir, subj_id):
    """Compute training-run prediction residuals for all V1-V4 ROIs.

    For each ROI the normalized model prediction is rescaled back to the
    original fMRI units (using the per-voxel mean/std of the training run),
    and the residual (measured - predicted) is computed.  The original,
    predicted and residual time series of all ROIs are stacked voxel-wise
    and stored in one 'roi_coding_fmri.npz' archive under `prf_dir`.
    """
    roi_list = ['v1rh', 'v1lh', 'v2rh', 'v2lh', 'v3rh', 'v3lh', 'v4rh', 'v4lh']
    orig_fmri = None
    pred_fmri = None
    res_fmri = None
    vxl_idx = None
    for roi in roi_list:
        idx, tx, vx = dataio.load_vim2_fmri(db_dir, subj_id, roi)
        # per-voxel mean/std of the training time series
        m = tx.mean(axis=1, keepdims=True)
        s = tx.std(axis=1, keepdims=True)
        roi_pred_file = os.path.join(prf_dir, roi, 'train_pred_norm_fmri.npy')
        roi_pred_fmri = np.load(roi_pred_file)
        # undo the z-score normalization of the prediction
        roi_pred_fmri = roi_pred_fmri * s + m
        res = tx - roi_pred_fmri
        if res_fmri is None:
            # first ROI: initialize the accumulators
            orig_fmri = tx
            pred_fmri = roi_pred_fmri
            res_fmri = res
            vxl_idx = idx
        else:
            orig_fmri = np.vstack((orig_fmri, tx))
            pred_fmri = np.vstack((pred_fmri, roi_pred_fmri))
            res_fmri = np.vstack((res_fmri, res))
            vxl_idx = np.concatenate((vxl_idx, idx))
    outfile = os.path.join(prf_dir, 'roi_coding_fmri')
    np.savez(outfile, orig_fmri=orig_fmri, pred_fmri=pred_fmri,
             res_fmri=res_fmri, vxl_idx=vxl_idx)
def gabor_contribution2prf(feat_dir, prf_dir, db_dir, subj_id, roi):
    """Calculate tuning contribution of each gabor sub-bank.

    For every voxel the prediction of each of the 5 gabor frequency
    sub-banks (8 orientations each, 40 features total) is computed
    separately from the selected model's features and weights, and
    correlated with the measured validation time series.  The resulting
    (n_voxels, 5) correlation matrix is saved as 'gabor_contributes.npy'
    in the ROI directory.
    """
    # load fmri response
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # load candidate models; mmap avoids reading the full array into memory
    val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'),
                         mmap_mode='r')
    # load selected model parameters
    roi_dir = os.path.join(prf_dir, roi)
    paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy'))
    sel_model = np.load(os.path.join(roi_dir, 'reg_sel_model.npy'))
    gabor_corr = np.zeros((paras.shape[0], 5))
    for i in range(paras.shape[0]):
        print('Voxel %s' % (i))
        # load features of this voxel's selected model
        feats = np.array(val_models[int(sel_model[i]), ...]).astype(np.float64)
        feats = zscore(feats).T
        for j in range(5):
            # columns j*8 .. j*8+7 belong to sub-bank j
            pred = np.dot(feats[:, (j * 8):(j * 8 + 8)],
                          paras[i, (j * 8):(j * 8 + 8)])
            gabor_corr[i, j] = np.corrcoef(pred, val_fmri_ts[i])[0, 1]
    np.save(os.path.join(roi_dir, 'gabor_contributes.npy'), gabor_corr)
def stimuli_recon(prf_dir, db_dir, subj_id, roi):
    """Reconstruct stimulus based on pRF model.

    Each validation time point is reconstructed as a weighted sum of the
    per-voxel filter maps, where the weight is the voxel's z-scored
    response at that time point.  Only voxels with validation correlation
    >= 0.17 contribute.  Output (n_timepoints, 128, 128) is saved as
    'recon_img.npy' in the ROI directory.
    """
    # load fmri response
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # load model parameters
    roi_dir = os.path.join(prf_dir, roi)
    val_corr = np.load(os.path.join(roi_dir, 'reg_sel_model_corr.npy'))
    filters = np.load(os.path.join(roi_dir, 'filters.npy'))
    recon_imgs = np.zeros((val_fmri_ts.shape[1], 128, 128))
    # fMRI data z-score; epsilon guards against zero-variance voxels
    print('fmri data temporal z-score')
    m = np.mean(val_fmri_ts, axis=1, keepdims=True)
    s = np.std(val_fmri_ts, axis=1, keepdims=True)
    val_fmri_ts = (val_fmri_ts - m) / (1e-10 + s)
    # select significantly predicted voxels
    sel_vxls = np.nonzero(val_corr >= 0.17)[0]
    for i in range(val_fmri_ts.shape[1]):
        print('Reconstruct stimilus %s' % (i + 1))
        tmp = np.zeros((128, 128))
        for j in sel_vxls:
            tmp += val_fmri_ts[int(j), int(i)] * filters[j]
        recon_imgs[i] = tmp
    np.save(os.path.join(roi_dir, 'recon_img.npy'), recon_imgs)
def null_distribution_prf_tunning(feat_dir, prf_dir, db_dir, subj_id, roi):
    """Generate a null distribution of pRF model tuning using validation data.

    For each voxel, the selected model's prediction is correlated with
    1000 random permutations of the voxel's validation time series,
    yielding an empirical null distribution of correlation values.
    Output (n_voxels, 1000) is saved as 'random_corr.npy' in the ROI
    directory.
    """
    # load fmri response
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # load candidate models; mmap avoids reading the full array into memory
    val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'),
                         mmap_mode='r')
    # output directory config
    roi_dir = os.path.join(prf_dir, roi)
    # load selected model parameters
    paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy'))
    sel_model = np.load(os.path.join(roi_dir, 'reg_sel_model.npy'))
    null_corr = np.zeros((paras.shape[0], 1000))
    for i in range(paras.shape[0]):
        print('Voxel %s' % (i))
        # load features of this voxel's selected model
        feats = np.array(val_models[int(sel_model[i]), ...]).astype(np.float64)
        feats = zscore(feats).T
        # the prediction is fixed; only the fMRI time series is shuffled
        pred = np.dot(feats, paras[i])
        for j in range(1000):
            shuffled_val_ts = np.random.permutation(val_fmri_ts[i])
            null_corr[i, j] = np.corrcoef(pred, shuffled_val_ts)[0, 1]
    np.save(os.path.join(roi_dir, 'random_corr.npy'), null_corr)
def prf_selection(feat_dir, prf_dir, db_dir, subj_id, roi):
    """Select the best model for each voxel and validate it.

    For every voxel the candidate model with the highest model-selection
    correlation is chosen; its parameters are kept and its prediction is
    validated against the held-out validation time series.  Outputs
    'reg_sel_paras.npy', 'reg_sel_model.npy' and 'reg_sel_model_corr.npy'
    in the ROI directory.
    """
    # load fmri response
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # load candidate models; mmap avoids reading the full array into memory
    val_models = np.load(os.path.join(feat_dir, 'val_candidate_model.npy'),
                         mmap_mode='r')
    # output directory config
    roi_dir = os.path.join(prf_dir, roi)
    # load candidate model parameters
    paras = np.load(os.path.join(roi_dir, 'reg_paras.npy'))
    mcorr = np.load(os.path.join(roi_dir, 'reg_model_corr.npy'))
    alphas = np.load(os.path.join(roi_dir, 'reg_alphas.npy'))
    # 46 parameters per voxel: 40 gabor weights + hue-related weights
    sel_paras = np.zeros((mcorr.shape[1], 46))
    sel_model = np.zeros(mcorr.shape[1])
    sel_model_corr = np.zeros(mcorr.shape[1])
    for i in range(mcorr.shape[1]):
        # best candidate model for this voxel
        maxi = np.argmax(mcorr[:, i])
        print('Voxel %s - Max corr %s - Model %s' % (i, mcorr[maxi, i], maxi))
        print('Alpha : %s' % (alphas[maxi, i]))
        sel_paras[i] = paras[maxi, i]
        sel_model[i] = maxi
        # validate the selected model on the validation run
        feats = np.array(val_models[maxi, ...]).astype(np.float64)
        feats = zscore(feats).T
        pred = np.dot(feats, sel_paras[i])
        sel_model_corr[i] = np.corrcoef(pred, val_fmri_ts[i])[0, 1]
        print('Val Corr : %s' % (sel_model_corr[i]))
    np.save(os.path.join(roi_dir, 'reg_sel_paras.npy'), sel_paras)
    np.save(os.path.join(roi_dir, 'reg_sel_model.npy'), sel_model)
    np.save(os.path.join(roi_dir, 'reg_sel_model_corr.npy'), sel_model_corr)
def filter_recon(prf_dir, db_dir, subj_id, roi):
    """Reconstruct the filter map of each voxel based on its selected model.

    Builds 40 spatial gabor kernels, then for every well-predicted voxel
    (validation corr >= 0.17) sums gabor kernels shifted to each non-zero
    position of the voxel's gaussian pooling field, weighted by the model
    parameters and the pooling-field value.  Filter images are saved as
    figures and the full (n_voxels, 128, 128) stack as 'filters.npy'.
    """
    # load fmri response -- only the voxel indices are needed here
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    del val_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # output config
    roi_dir = os.path.join(prf_dir, roi)
    # pRF estimate
    sel_models = np.load(os.path.join(roi_dir, 'reg_sel_model.npy'))
    sel_paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy'))
    sel_model_corr = np.load(os.path.join(roi_dir, 'reg_sel_model_corr.npy'))
    filters = np.zeros((sel_models.shape[0], 128, 128))
    fig_dir = os.path.join(roi_dir, 'filters')
    check_path(fig_dir)
    thr = 0.17
    # gabor bank generation
    gwt = bob.ip.gabor.Transform()
    gwt.generate_wavelets(128, 128)
    spatial_gabors = np.zeros((40, 128, 128))
    for i in range(40):
        w = bob.ip.gabor.Wavelet(resolution=(128, 128),
                                 frequency=gwt.wavelet_frequencies[i])
        sw = bob.sp.ifft(w.wavelet.astype(np.complex128))
        # recenter the spatial kernel
        spatial_gabors[i, ...] = np.roll(np.roll(np.real(sw), 64, 0), 64, 1)
    for i in range(sel_models.shape[0]):
        # skip poorly predicted voxels
        if sel_model_corr[i] < thr:
            continue
        print('Voxel %s, Val Corr %s' % (i, sel_model_corr[i]))
        model_idx = int(sel_models[i])
        # decode gaussian pooling field parameters from the model index;
        # floor division keeps integer indices under Python 3 as well
        si = model_idx // 1024
        xi = (model_idx % 1024) // 32
        yi = (model_idx % 1024) % 32
        x0 = np.arange(0, 128, 4)[xi]
        y0 = np.arange(0, 128, 4)[yi]
        s = np.linspace(1, 50, 15)[si]
        kernel = make_2d_gaussian(128, s, center=(x0, y0))
        kpos = np.nonzero(kernel)
        paras = sel_paras[i]
        for gwt_idx in range(40):
            wt = paras[gwt_idx]
            arsw = spatial_gabors[gwt_idx]
            for p in range(kpos[0].shape[0]):
                tmp = img_offset(arsw, (kpos[0][p], kpos[1][p]))
                filters[i] += wt * kernel[kpos[0][p], kpos[1][p]] * tmp
        # the >= thr guard above guarantees this voxel passed the threshold,
        # so its filter image is always saved
        im_file = os.path.join(fig_dir,
                               'Voxel_%s_%s.png' % (i + 1, vxl_idx[i]))
        vutil.save_imshow(filters[i], im_file)
    np.save(os.path.join(roi_dir, 'filters.npy'), filters)
def merge_roi_data(prf_dir, db_dir, subj_id):
    """Stack the fMRI data of all V1-V4 ROIs into one archive.

    Concatenates training/validation time series and voxel indices of the
    eight hemispheric ROIs voxel-wise and saves them as
    'roi_orig_fmri.npz' under `prf_dir`.
    """
    roi_list = ['v1rh', 'v1lh', 'v2rh', 'v2lh', 'v3rh', 'v3lh', 'v4rh', 'v4lh']
    train_ts = None
    val_ts = None
    vxl_idx = None
    for roi in roi_list:
        idx, tx, vx = dataio.load_vim2_fmri(db_dir, subj_id, roi)
        if train_ts is None:
            # first ROI: initialize the accumulators
            train_ts = tx
            val_ts = vx
            vxl_idx = idx
        else:
            train_ts = np.vstack((train_ts, tx))
            val_ts = np.vstack((val_ts, vx))
            vxl_idx = np.concatenate((vxl_idx, idx))
    outfile = os.path.join(prf_dir, 'roi_orig_fmri')
    np.savez(outfile, train_ts=train_ts, val_ts=val_ts, vxl_idx=vxl_idx)
def prf_recon(prf_dir, db_dir, subj_id, roi):
    """Reconstruct the pRF of each voxel based on its selected model.

    For every voxel, sums 2D gaussians (one per frequency band, weighted
    by the band's total gabor weight) placed at each non-zero position of
    the voxel's gaussian pooling field.  pRF images of well-predicted
    voxels (validation corr >= 0.25) are saved as figures; the full
    (n_voxels, 128, 128) stack is saved as 'prfs.npy'.
    """
    # load fmri response -- only the voxel indices are needed here
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del train_fmri_ts
    del val_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # output directory config
    roi_dir = os.path.join(prf_dir, roi)
    # pRF estimate
    sel_models = np.load(os.path.join(roi_dir, 'reg_sel_model.npy'))
    sel_paras = np.load(os.path.join(roi_dir, 'reg_sel_paras.npy'))
    sel_model_corr = np.load(os.path.join(roi_dir, 'reg_sel_model_corr.npy'))
    prfs = np.zeros((sel_models.shape[0], 128, 128))
    fig_dir = os.path.join(roi_dir, 'figs')
    check_path(fig_dir)
    for i in range(sel_models.shape[0]):
        # get pRF
        print('Voxel %s, Val Corr %s' % (i, sel_model_corr[i]))
        model_idx = int(sel_models[i])
        # decode gaussian pooling field parameters from the model index;
        # floor division keeps integer indices under Python 3 as well
        si = model_idx // 1024
        xi = (model_idx % 1024) // 32
        yi = (model_idx % 1024) % 32
        x0 = np.arange(0, 128, 4)[xi]
        y0 = np.arange(0, 128, 4)[yi]
        s = np.linspace(1, 50, 15)[si]
        #kernel = make_cycle(128, s, center=(x0, y0))
        kernel = make_2d_gaussian(128, s, center=(x0, y0))
        kpos = np.nonzero(kernel)
        paras = sel_paras[i]
        for f in range(5):
            # total weight of frequency band f (8 orientations)
            fwt = np.sum(paras[(f * 8):(f * 8 + 8)])
            # spatial extent grows with the band's wavelength
            fs = np.sqrt(2)**f * 4
            for p in range(kpos[0].shape[0]):
                tmp = make_2d_gaussian(128, fs,
                                       center=(kpos[1][p], kpos[0][p]))
                prfs[i] += fwt * kernel[kpos[0][p], kpos[1][p]] * tmp
        if sel_model_corr[i] >= 0.25:
            prf_file = os.path.join(fig_dir,
                                    'Voxel_%s_%s.png' % (i + 1, vxl_idx[i]))
            vutil.save_imshow(prfs[i], prf_file)
    np.save(os.path.join(roi_dir, 'prfs.npy'), prfs)
def get_pls_residual(pls_dir, prf_dir, db_dir, subj_id):
    """Compute residuals of the PLS prediction for each V1-V4 ROI.

    The PLS-predicted training/validation fMRI is matched to each ROI's
    voxels via the merged voxel-index array, and the residual
    (measured - predicted) is saved per ROI as 'pls_residual_fmri.npz'.
    """
    pls_pred_fmri = np.load(os.path.join(pls_dir, 'pls_pred_fmri_c10.npz'))
    # transpose to (n_voxels, n_timepoints)
    train_pred_fmri = pls_pred_fmri['pred_train'].T
    print(train_pred_fmri.shape)
    val_pred_fmri = pls_pred_fmri['pred_val'].T
    orig_fmri_data = np.load(os.path.join(prf_dir, 'roi_orig_fmri.npz'))
    vxl_idx = orig_fmri_data['vxl_idx']
    # ROI list
    roi_list = ['v1rh', 'v1lh', 'v2rh', 'v2lh', 'v3rh', 'v3lh', 'v4rh', 'v4lh']
    for roi in roi_list:
        roi_idx, roi_tx, roi_vx = dataio.load_vim2_fmri(db_dir, subj_id, roi)
        print(roi_tx.shape)
        # set membership is O(1); scanning the index array per voxel is O(n*m)
        roi_idx_set = set(roi_idx)
        sel_idx = [i for i in range(len(vxl_idx))
                   if vxl_idx[i] in roi_idx_set]
        roi_train_pred = train_pred_fmri[sel_idx]
        roi_val_pred = val_pred_fmri[sel_idx]
        print(roi_train_pred.shape)
        res_train = roi_tx - roi_train_pred
        res_val = roi_vx - roi_val_pred
        outfile = os.path.join(prf_dir, roi, 'pls_residual_fmri')
        np.savez(outfile, pls_train_residual=res_train,
                 pls_val_residual=res_val, vxl_idx=roi_idx)
def get_vxl_idx_in_rect(prf_dir, db_dir, subj_id, rmin, rmax, cmin, cmax):
    """Get indices of voxels whose pRF lies within the specified rect.

    A V1 voxel qualifies when it is well predicted (validation corr >=
    0.25) and more than half of its thresholded pRF mass falls inside the
    inclusive rectangle [rmin, rmax] x [cmin, cmax] of the 128x128 visual
    field.  Returns the list of qualifying voxel indices.
    """
    roi_list = ['v1rh', 'v1lh']
    vxl_idx = []
    # binary mask of the (inclusive) rectangle
    rect = np.zeros((128, 128))
    rect[rmin:(rmax + 1), cmin:(cmax + 1)] = 1
    for roi in roi_list:
        print(roi)
        idx, tx, vx = dataio.load_vim2_fmri(db_dir, subj_id, roi)
        del tx
        del vx
        print('Number of voxel idx: %s' % (len(idx)))
        roi_dir = os.path.join(prf_dir, roi)
        corr = np.load(os.path.join(roi_dir, 'reg_sel_model_corr.npy'))
        prfs = np.load(os.path.join(roi_dir, 'prfs.npy'))
        prfs = np.abs(prfs)
        for i in range(len(idx)):
            if corr[i] >= 0.25:
                # threshold the pRF into a binary support mask
                x = prfs[i] > 0.00005
                # fraction of the pRF support inside the rectangle
                ratio = np.sum(x * rect) / x.sum()
                if ratio > 0.5:
                    vxl_idx.append(idx[i])
    return vxl_idx
def ridge_fitting(feat_dir, prf_dir, db_dir, subj_id, roi):
    """pRF model fitting using ridge regression.

    90% of the training data is used for model tuning, the remaining 10%
    for model selection.  For each of the 15360 candidate models a ridge
    regression with bootstrapped alpha selection is fit to all voxels.
    Per-model weights, selection correlations and alphas are written
    through memmaps and finally re-saved as proper .npy files
    ('reg_paras.npy', 'reg_model_corr.npy', 'reg_alphas.npy') in the ROI
    directory.
    """
    # load fmri response (validation data is not needed during fitting)
    vxl_idx, train_fmri_ts, val_fmri_ts = dataio.load_vim2_fmri(db_dir,
                                                                subj_id,
                                                                roi=roi)
    del val_fmri_ts
    print('Voxel number: %s' % (len(vxl_idx)))
    # load candidate models; mmap avoids reading the full array into memory
    train_models = np.load(os.path.join(feat_dir, 'train_candidate_model.npy'),
                           mmap_mode='r')
    # output directory config
    roi_dir = os.path.join(prf_dir, roi)
    check_path(roi_dir)
    # model selection and tuning
    ALPHA_NUM = 20
    BOOTS_NUM = 15
    # memmaps keep the (15360, n_voxels, ...) outputs out of RAM
    paras_file = os.path.join(roi_dir, 'reg_paras.npy')
    paras = np.memmap(paras_file, dtype='float64', mode='w+',
                      shape=(15360, len(vxl_idx), 46))
    mcorr_file = os.path.join(roi_dir, 'reg_model_corr.npy')
    mcorr = np.memmap(mcorr_file, dtype='float64', mode='w+',
                      shape=(15360, len(vxl_idx)))
    alphas_file = os.path.join(roi_dir, 'reg_alphas.npy')
    alphas = np.memmap(alphas_file, dtype='float64', mode='w+',
                       shape=(15360, len(vxl_idx)))
    # fMRI data z-score; epsilon guards against zero-variance voxels
    print('fmri data temporal z-score')
    m = np.mean(train_fmri_ts, axis=1, keepdims=True)
    s = np.std(train_fmri_ts, axis=1, keepdims=True)
    train_fmri_ts = (train_fmri_ts - m) / (1e-10 + s)
    # split training dataset into model tuning set and model selection set
    tune_fmri_ts = train_fmri_ts[:, :int(7200 * 0.9)]
    sel_fmri_ts = train_fmri_ts[:, int(7200 * 0.9):]
    # model testing
    for i in range(15360):
        print('Model %s' % (i))
        train_x = np.array(train_models[i, ...]).astype(np.float64)
        train_x = zscore(train_x).T
        # split features with the same 90/10 boundary as the fMRI data
        tune_x = train_x[:int(7200 * 0.9), :]
        sel_x = train_x[int(7200 * 0.9):, :]
        wt, r, alpha, bscores, valinds = ridge.bootstrap_ridge(
            tune_x, tune_fmri_ts.T, sel_x, sel_fmri_ts.T,
            alphas=np.logspace(-2, 3, ALPHA_NUM),
            nboots=BOOTS_NUM, chunklen=720, nchunks=1,
            single_alpha=False, use_corr=False)
        paras[i, ...] = wt.T
        mcorr[i] = r
        alphas[i] = alpha
    # save output: rewrite the raw memmap buffers as well-formed .npy files
    paras = np.array(paras)
    np.save(paras_file, paras)
    mcorr = np.array(mcorr)
    np.save(mcorr_file, mcorr)
    alphas = np.array(alphas)
    np.save(alphas_file, alphas)