def getPatternsData(subjectNum, runNum):
    """Load the saved classifier patterns .mat data for one subject/run.

    Builds the BIDS-style session-2 path on the jukebox intelData share,
    globs for matching patternsData files, and loads the last glob match.

    NOTE(review): glob order is filesystem-dependent, so [-1] is not
    guaranteed to be the newest file -- confirm this is acceptable
    (compare findNewestFile used elsewhere in this file).

    Raises IndexError if no file matches the pattern.
    """
    subject_dir = 'sub-{0:03d}'.format(subjectNum)
    session_dir = 'ses-{0:02d}'.format(2)
    search = '/jukebox/norman/amennen/RT_prettymouth/data/intelData/{0}/{1}/patternsData_r{2}_*.mat'.format(subject_dir, session_dir, runNum)
    matches = glob.glob(search)
    return loadMatFile(matches[-1])
def getBehavData(subjectNum, runNum):
    """Load the behavior .mat file for one subject/run (session 2).

    The path nests a run-specific directory under the BIDS-style
    subject/session layout on the jukebox intelData share; the last
    glob match is loaded.

    NOTE(review): glob order is filesystem-dependent, so [-1] is not
    guaranteed to be the newest file -- confirm this is acceptable.

    Raises IndexError if no file matches the pattern.
    """
    subject_dir = 'sub-{0:03d}'.format(subjectNum)
    session_dir = 'ses-{0:02d}'.format(2)
    run_dir = 'run-{0:03d}'.format(runNum)
    search = '/jukebox/norman/amennen/RT_prettymouth/data/intelData/{0}/{1}/{2}/behavior_run{3}_*.mat'.format(subject_dir, session_dir, run_dir, runNum)
    matches = glob.glob(search)
    return loadMatFile(matches[-1])
def getCorrectProbability(cfg):
    """Gather per-station correct probabilities over every run.

    Reads cfg.totalNumberRuns and cfg.subject_full_day_path; for each run
    loads the last-globbed patternsData file and copies its first
    correct_prob row.

    Returns an (nRuns, 7) float array -- 7 is the hard-coded station
    count (presumably fixed by the experiment design; TODO confirm).
    """
    n_runs = int(cfg.totalNumberRuns)
    probs = np.zeros((n_runs, 7))
    for run_index in range(n_runs):
        search = '{0}/patternsData_r{1}*'.format(cfg.subject_full_day_path, run_index + 1)
        # NOTE(review): glob order is filesystem-dependent; [-1] may not
        # be the newest file.
        run_file = glob.glob(search)[-1]
        run_data = loadMatFile(run_file)
        probs[run_index, :] = run_data.correct_prob[0, :]
    return probs
def getPatternsData(subject_num, run_num):
    """Load patterns data plus its key probability fields for one run.

    Same path scheme as the other loaders (BIDS-style session-2 layout on
    the jukebox intelData share, last glob match wins).

    NOTE(review): this redefines getPatternsData with a different return
    signature -- if both definitions live in one module, the later one
    shadows the earlier. Confirm which callers expect which.

    Returns a 4-tuple: (full data struct, cheating probability,
    z-transformed cheating probability, correct-probability scores).
    """
    subject_dir = 'sub-{0:03d}'.format(subject_num)
    session_dir = 'ses-{0:02d}'.format(2)
    search = '/jukebox/norman/amennen/RT_prettymouth/data/intelData/{0}/{1}/patternsData_r{2}_*.mat'.format(
        subject_dir, session_dir, run_num)
    data = loadMatFile(glob.glob(search)[-1])
    return (data,
            data['cheating_probability'],
            data['zTransferred'],
            data['correct_prob'])
def test_loadMatlabFile(self, testStruct, matTestFilename):
    """Round-trip check: loading a .mat file from disk and from an
    in-memory buffer must both reproduce the reference struct."""
    print("Test LoadMatlabFile")
    # Path 1: load directly from the file on disk.
    from_file = utils.loadMatFile(matTestFilename)
    assert testStruct.__name__ == from_file.__name__
    file_diff = vutils.compareMatStructs(testStruct, from_file)
    assert vutils.isMeanWithinThreshold(file_diff, 0)
    # Path 2: read raw bytes ourselves and load via the buffer API.
    with open(matTestFilename, 'rb') as fp:
        raw_bytes = fp.read()
    from_buffer = utils.loadMatFileFromBuffer(raw_bytes)
    buffer_diff = vutils.compareMatStructs(testStruct, from_buffer)
    assert vutils.isMeanWithinThreshold(buffer_diff, 0)
# now load the same information cmap=plt.get_cmap('cool') colors=cmap(np.linspace(0,1,nStations)) brainiak_path='/jukebox/norman/amennen/github/brainiak/rt-cloud/projects/greenEyes/data/' # now load newestfile sys.path.append('/jukebox/norman/amennen/github/brainiak/rt-cloud') from rtCommon.utils import findNewestFile, loadMatFile s = 0 subject_path = brainiak_path + 'sub-' + str(allSubjects[s]) + '/' + 'ses-02' + '/' run = 1 filePattern = 'patternsData_r{}*'.format(run) fn = findNewestFile(subject_path,filePattern) test_data = loadMatFile(fn) test_prob = test_data.correct_prob x = correct_prob[:,run-1,s] y = test_prob[0,:] corr = sstats.pearsonr(x,y)[0] plt.figure(figsize=(10,10)) for st in np.arange(nStations): plt.plot(x[st],y[st], '.', ms=20, color=colors[st],label=st) plt.plot([0,1],[0,1], '--', color='r', lw=3) plt.title('Subj %i, Run %i, Total corr = %3.3f' % (allSubjects[s],run,corr)) plt.xlim([0,1]) plt.ylim([0,1]) plt.xlabel('Offline prediction') plt.legend()
# Set up rt-cloud imports. The first path is for the local workstation,
# the second for the IntelRT machine (last append wins for duplicates on
# neither machine; both are appended so either environment resolves).
sys.path.append('/home/amennen/code/rt-cloud')
# OR FOR INTELRT
sys.path.append('/Data1/code/rt-cloud/')
from rtCommon.utils import loadConfigFile, dateStr30, DebugLevels, writeFile, loadMatFile
from rtCommon.readDicom import readDicomFromBuffer
from rtCommon.fileClient import FileInterface
import rtCommon.webClientUtils as wcutils
from rtCommon.structDict import StructDict
import rtCommon.dicomNiftiHandler as dnh
import greenEyes

# Build a minimal args struct and initialize the greenEyes config for
# subject 102 (local conf path kept commented, IntelRT path active --
# mirrors the sys.path toggle above).
subject = 102
#conf='/home/amennen/code/rt-cloud/projects/greenEyes/conf/greenEyes_organized.local.toml'
conf = '/Data1/code/rt-cloud/projects/greenEyes/conf/greenEyes_organized.toml'
args = StructDict()
args.config = conf
args.runs = '1'
args.scans = '5'
args.webpipe = None
args.filesremote = False
cfg = greenEyes.initializeGreenEyes(args.config, args)

# Load the patterns data for run 1 (r is 0-based, filenames 1-based).
# NOTE(review): glob order is filesystem-dependent; [-1] may not be the
# newest file.
r = 0
fileStr = '{0}/patternsData_r{1}*'.format(cfg.subject_full_day_path, r + 1)
run_pat = glob.glob(fileStr)[-1]
run_data = loadMatFile(run_pat)

# check classifier
# Local-vs-IntelRT toggle, same convention as conf above. BUG FIX: the
# original IntelRT line ended with a stray extra quote ('' after .sav),
# which was a syntax error; it is removed here.
#modelfn = '/home/amennen/utils/greenEyes_clf/UPPERRIGHT_stationInd_0_ROI_1_AVGREMOVE_1_filter_0_k1_0_k2_25.sav'
modelfn = '/Data1/code/utils_greenEyes/greenEyes_clf/UPPERRIGHT_stationInd_0_ROI_1_AVGREMOVE_1_filter_0_k1_0_k2_25.sav'
# Use a context manager so the file handle is closed (the original
# pickle.load(open(...)) leaked it). SECURITY NOTE: pickle.load executes
# arbitrary code from the file -- only load trusted model files.
with open(modelfn, 'rb') as model_file:
    loaded_model = pickle.load(model_file)
def getRegressorMatrix(cfg, runNum):
    """Load the unshifted regressor matrix for one run.

    Reads Regressors_unshifted_Rm2TR.mat from the run-specific directory
    under cfg.subject_full_day_path and returns its 'REGRESSOR_MATRIX'
    entry (a .mat struct field; exact shape not visible here).
    """
    run_id = 'run-{0:02d}'.format(runNum)
    mat_path = cfg.subject_full_day_path + '/' + run_id + '/' + 'Regressors_unshifted_Rm2TR.mat'
    loaded = loadMatFile(mat_path)
    return loaded['REGRESSOR_MATRIX']