def gen_module_roi(root_dir):
    """Make an ROI volume from `sel_emotion_rois.csv`.

    Each CSV row gives an ROI label (col 1) and an MNI coordinate in mm
    (cols 3-5).  A 5x5x5-voxel cube is drawn around each center in the
    2mm MNI152 grid (91x109x91); voxels claimed by more than one ROI are
    zeroed out.  The result is saved as `sel_emotion_modules.nii.gz`.

    NOTE(review): `root_dir` is unused; kept for interface compatibility.
    """
    # Read the ROI table; drop the header row.
    with open('sel_emotion_rois.csv') as f:
        roi_info = f.readlines()
    roi_info.pop(0)
    roi_info = [line.strip().split(',') for line in roi_info]
    mask = np.zeros((91, 109, 91))
    for line in roi_info:
        # MNI mm coordinates -> voxel indices in the 2mm MNI152 grid.
        i = int((90.0 - float(line[3])) / 2)
        j = int((float(line[4]) + 126) / 2)
        k = int((float(line[5]) + 72) / 2)
        label = int(line[1])
        # Clamp the cube to the volume.  The original bare try/except only
        # caught IndexError on the high side; numpy silently *wraps*
        # negative indices, which corrupted the opposite edge of the volume.
        for n_x in range(max(i - 2, 0), min(i + 3, 91)):
            for n_y in range(max(j - 2, 0), min(j + 3, 109)):
                for n_z in range(max(k - 2, 0), min(k + 3, 91)):
                    if mask[n_x, n_y, n_z] > 0:
                        # Voxel already owned by another ROI: flag overlap.
                        mask[n_x, n_y, n_z] = 1000
                    else:
                        mask[n_x, n_y, n_z] = label
    # Discard overlapping voxels.
    mask[mask == 1000] = 0
    mni_vol = os.path.join(os.environ['FSL_DIR'], 'data', 'standard',
                           'MNI152_T1_2mm_brain.nii.gz')
    aff = nib.load(mni_vol).affine
    outfile = 'sel_emotion_modules.nii.gz'
    nibase.save2nifti(mask, aff, outfile)
def get_mean_cope(root_dir, subj):
    """Compute per-condition mean COPE volumes for each of 10 runs.

    For every run, the 72 trial COPEs are grouped by condition tag (1-4)
    and averaged over trials, giving a (91, 109, 91, 4) volume that is
    saved to `<root_dir>/workshop/searchlight/<subj>_mean_copes_<run>.nii.gz`.
    """
    # dir config
    work_dir = os.path.join(root_dir, 'workshop', 'searchlight')
    # load nii data list
    print('Load nii files ...')
    cope_list = get_subj_cope_list(root_dir, subj)
    # get trial sequence info
    print('Load trial sequence info ...')
    tag_list = get_subj_cope_tag(root_dir, subj)
    # The template affine is identical for every run -- load it once
    # instead of once per run (loop-invariant hoist).
    fsl_dir = os.getenv('FSL_DIR')
    template_file = os.path.join(fsl_dir, 'data', 'standard',
                                 'MNI152_T1_2mm_brain.nii.gz')
    aff = nib.load(template_file).affine
    for i in range(10):
        mean_cope = np.zeros((91, 109, 91, 4))
        copes = cope_list[i]
        # Only the first 72 tags correspond to trials with COPEs.
        tag = np.array(tag_list[i][:72])
        for c in range(4):
            # Average all trials belonging to condition c+1.
            cond_idx = tag == (c + 1)
            mean_cope[..., c] = np.mean(copes[..., cond_idx], axis=3)
        # save to nifti
        nibase.save2nifti(
            mean_cope, aff,
            os.path.join(work_dir, subj + '_mean_copes_%s.nii.gz' % (i + 1)))
def get_mean_emo_activation(root_dir, subj):
    """Get mean activation for each emotion category based on single run's
    data.

    For each of 10 runs, time points are grouped by emotion category (1-4)
    using the trial-sequence dict and averaged, yielding a
    (91, 109, 91, 4) volume saved to
    `<root_dir>/workshop/searchlight/<subj>_mean_act_<run>.nii.gz`.
    """
    work_dir = os.path.join(root_dir, 'workshop', 'searchlight')
    # load nii data list
    print('Load nii files ...')
    act_list = get_subj_act_list(root_dir, subj)
    # get trial sequence info
    print('Load trial sequence info ...')
    tag_list = get_subj_trial_seq(root_dir, subj)
    # The template affine is the same for every run -- load it once.
    fsl_dir = os.getenv('FSL_DIR')
    template_file = os.path.join(fsl_dir, 'data', 'standard',
                                 'MNI152_T1_2mm_brain.nii.gz')
    aff = nib.load(template_file).affine
    for i in range(10):
        mean_act = np.zeros((91, 109, 91, 4))
        act = act_list[i]
        tag = tag_list[i]
        for c in range(4):
            # Collect volume indices of all trials whose category
            # (tag[k][1]) matches condition c+1; tag[k][0] is assumed to
            # be a list of time-point indices -- TODO confirm with
            # get_subj_trial_seq.
            tmp_idx = []
            for k in tag:
                if tag[k][1] == (c + 1):
                    tmp_idx = tmp_idx + tag[k][0]
            tmp_act = act[..., np.array(tmp_idx)]
            mean_act[..., c] = np.mean(tmp_act, axis=3)
        # save to nifti
        nibase.save2nifti(
            mean_act, aff,
            os.path.join(work_dir, subj + '_mean_act_%s.nii.gz' % (i + 1)))
def mat2nii(mat_structure, data_name, template_file, out_file):
    """Save `data_name` from `mat_structure` to nii file.

    `data_name` can be one of `r2_train`, `r2_val`, `beta_train`,
    `beta_val` and `hrfs`.  The affine is taken from `template_file`.
    """
    # Borrow the affine from the template image, then write the data out.
    affine = nib.load(template_file).affine
    save2nifti(mat_structure[data_name], affine, out_file)
def save2nifti(data, output_file):
    """Save `data` as a nifti file, reusing the MNI152 2mm template header."""
    template = os.path.join(os.getenv('FSL_DIR'), 'data', 'standard',
                            'MNI152_T1_2mm_brain.nii.gz')
    hdr = nib.load(template).get_header()
    mybase.save2nifti(data, hdr, output_file)
def random_svm_cope_searchlight(root_dir, subj):
    """SVM based searchlight analysis with permuted labels (null model).

    Runs 100 permutations.  For every voxel inside the functional mask, a
    sigmoid-kernel SVC is trained on COPE patterns from a radius-2 cube
    with the training labels randomly shuffled, and per-condition test
    accuracy is written into a (91, 109, 91, 4) volume saved as
    `random_<subj>_svm_acc_cope_<perm>.nii.gz`.
    """
    # dir config
    work_dir = os.path.join(root_dir, 'workshop', 'searchlight')
    # read mask file
    print('Load mask data ...')
    mask_file = os.path.join(work_dir, 'mask', 'func_mask.nii.gz')
    mask_data = nib.load(mask_file).get_data()
    mask_data = mask_data > 0
    # load nii data list
    print('Load nii files ...')
    cope_list = get_subj_cope_list(root_dir, subj)
    # get trial sequence info
    print('Load trial sequence info ...')
    tag_list = get_subj_cope_tag(root_dir, subj)
    # Loop-invariant work hoisted out of the 100-permutation loop: the
    # mask coordinates and the template affine never change.
    mask_coord = niroi.get_roi_coord(mask_data)
    fsl_dir = os.getenv('FSL_DIR')
    template_file = os.path.join(fsl_dir, 'data', 'standard',
                                 'MNI152_T1_2mm_brain.nii.gz')
    aff = nib.load(template_file).affine
    for i in range(100):
        # svm results var
        clf_results = np.zeros((91, 109, 91, 4))
        # voxel-wise searchlight
        ccount = 0
        for c in mask_coord:
            ccount += 1
            print(ccount)
            cube_roi = np.zeros((91, 109, 91))
            cube_roi = niroi.cube_roi(cube_roi, c[0], c[1], c[2], 2, 1)
            cube_coord = niroi.get_roi_coord(cube_roi)
            [train_x, train_y, test_x, test_y] = get_roi_cope_mvps(
                cope_list, tag_list, cube_coord)
            clf = svm.SVC(kernel='sigmoid')
            # Shuffle training labels: this run estimates the null
            # (chance) accuracy distribution.
            train_y = np.random.permutation(train_y)
            clf.fit(train_x, train_y)
            pred = clf.predict(test_x)
            for e in range(4):
                # Per-condition accuracy on the held-out trials.
                acc = np.sum(pred[test_y == (e + 1)] == (e + 1)) * 1.0 \
                        / np.sum(test_y == (e + 1))
                print(acc)
                clf_results[c[0], c[1], c[2], e] = acc
        # save to nifti
        nibase.save2nifti(
            clf_results, aff,
            os.path.join(work_dir,
                         'random_' + subj + '_svm_acc_cope_%s.nii.gz' % (i + 1)))
def z2r():
    """Do a Fisher r-to-z transform on a merged correlation volume.

    NOTE(review): despite the name `z2r`, the formula
    z = log((1+r)/(1-r)) / 2 = arctanh(r) is the r-to-z direction.
    The name is kept for existing callers.

    Values of exactly +/-1 are nudged to +/-0.999999 first so the
    transform stays finite.
    """
    source_dir = r'/nfs/h1/workingshop/huanglijie/uni_mul_analysis/multivariate'
    input = os.path.join(source_dir, 'beh_corr', 'rmet', 'merged_data.nii.gz')
    output = os.path.join(source_dir, 'beh_corr', 'rmet',
                          'merged_data_r2z.nii.gz')
    # Load the image once for both data and header (was loaded twice).
    img = nib.load(input)
    data = img.get_data()
    header = img.get_header()
    # Report how many saturated values need clamping ...
    print(np.sum(data == 1) + np.sum(data == -1))
    data[data == 1] = 0.999999
    data[data == -1] = -0.999999
    # ... and confirm none remain after the clamp (original checked only
    # the +1 end the second time).
    print(np.sum(data == 1) + np.sum(data == -1))
    zdata = np.log((1 + data) / (1 - data)) / 2
    nibase.save2nifti(zdata, header, output)
def refine_rois(root_dir):
    """Refine ROIs: remap ROI labels per `new_neurosynth_roi_info.csv`."""
    roi_dir = os.path.join(root_dir, 'group-level', 'rois', 'neurosynth')
    orig_roi_file = os.path.join(
        roi_dir, 'merged_hfdn_mask_Tmax_s2_lmax_roi_orig.nii.gz')
    roi_info_file = os.path.join(roi_dir, 'new_neurosynth_roi_info.csv')
    # Parse the mapping table; first row is the header.
    with open(roi_info_file, 'r') as f:
        records = [ln.strip().split(',') for ln in f.readlines()]
    records = records[1:]
    # Relabel: every voxel carrying the old id (col 1) gets the new id
    # (col 0).
    orig_roi = nib.load(orig_roi_file).get_data()
    new_roi = np.zeros_like(orig_roi)
    for rec in records:
        new_roi[orig_roi == int(rec[1])] = int(rec[0])
    # Write the relabeled volume next to the original.
    new_roi_file = os.path.join(roi_dir,
                                'merged_hfdn_mask_Tmax_s2_lmax_roi.nii.gz')
    aff = nib.load(orig_roi_file).affine
    nibase.save2nifti(new_roi, aff, new_roi_file)
def power264roi(root_dir):
    """Make ROI file based on Power264 atlas.

    Reads `power264.csv` (columns: id, FSL label, x, y, z in MNI mm,
    module name), draws a 5x5x5-voxel cube around each center in the
    2mm MNI152 grid, discards voxels claimed by more than one ROI,
    writes the lookup table `power264_roi.csv` and the label volume
    `power264_rois.nii.gz`.

    NOTE(review): `root_dir` is unused; kept for interface compatibility.
    """
    with open('power264.csv') as f:
        roi_info = f.readlines()
    roi_info.pop(0)
    roi_info = [line.strip().split(',') for line in roi_info]
    # Group ROI centers and FSL labels by module name (column 5).
    roi_dict = {}
    roi_label_dict = {}
    for line in roi_info:
        if not line[5] in roi_dict:
            roi_dict[line[5]] = {}
            roi_label_dict[line[5]] = {}
        # MNI mm coordinates -> voxel indices in the 2mm MNI152 grid.
        i = int((90.0 - int(line[2])) / 2)
        j = int((int(line[3]) + 126) / 2)
        k = int((int(line[4]) + 72) / 2)
        roi_dict[line[5]][int(line[0])] = [i, j, k]
        roi_label_dict[line[5]][int(line[0])] = line[1]
    sel_module = roi_dict.keys()
    count = 1
    mask = np.zeros((91, 109, 91))
    with open('power264_roi.csv', 'w') as froi:
        froi.write('RID,FSL_label,X,Y,Z,Module\n')
        for m in sel_module:
            centers = roi_dict[m]
            labels = roi_label_dict[m]
            for c in centers:
                x, y, z = centers[c]
                # Clamp the cube to the volume.  The original bare
                # try/except only caught IndexError on the high side;
                # numpy silently *wraps* negative indices, which would
                # corrupt the opposite edge of the volume.
                for n_x in range(max(x - 2, 0), min(x + 3, 91)):
                    for n_y in range(max(y - 2, 0), min(y + 3, 109)):
                        for n_z in range(max(z - 2, 0), min(z + 3, 91)):
                            if mask[n_x, n_y, n_z] > 0:
                                # Voxel already owned: flag overlap.
                                mask[n_x, n_y, n_z] = 1000
                            else:
                                mask[n_x, n_y, n_z] = count
                froi.write(','.join([str(count), labels[c],
                                     str(centers[c][0]),
                                     str(centers[c][1]),
                                     str(centers[c][2]), m]) + '\n')
                count += 1
    # Discard overlapping voxels.
    mask[mask == 1000] = 0
    mni_vol = os.path.join(os.environ['FSL_DIR'], 'data', 'standard',
                           'MNI152_T1_2mm_brain.nii.gz')
    aff = nib.load(mni_vol).affine
    outfile = 'power264_rois.nii.gz'
    nibase.save2nifti(mask, aff, outfile)