def main():
    """Real-time run loop for the GREEN EYES experiment.

    Parses command-line options, initializes the project comm pipe and file
    interface, then for each configured run watches for per-TR DICOM files,
    converts/registers/masks each volume, and at every classification
    "station" runs preprocessAndPredict and writes the classifier
    probability to a text file (optionally also to the web plot via the
    comm pipe).  Calls sys.exit(0) when all runs are done.
    """
    logger = logging.getLogger()
    # logLevel / defaultConfig are module-level settings defined elsewhere in the file
    logger.setLevel(logLevel)
    logging.info('GREEN EYES: first log message!')
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs', '-r', default='', type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans', '-s', default='', type=str,
                           help='Comma separated list of scan number')
    argParser.add_argument('--deleteTmpNifti', '-d', default='1', type=str,
                           help='Set to 0 if rerunning during a single scanning after error')
    # creates pipe communication link to send/request responses through pipe
    argParser.add_argument('--commpipe', '-q', default=None, type=str,
                           help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote', '-x', default=False, action='store_true',
                           help='dicom files retrieved from remote server')
    args = argParser.parse_args()
    print(args)
    cfg = initializeGreenEyes(args.config,args)

    # DELETE ALL FILES IF FLAGGED TO #
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # DELETE ALL FILES IF FLAGGED TO #

    # comm pipe
    projComm = projUtils.initProjectComm(args.commpipe,args.filesremote)
    # initialize file interface class -- for now only local
    fileInterface = FileInterface(filesremote=args.filesremote, commPipes=projComm)
    # intialize watching in particular directory
    fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern, cfg.minExpectedDicomSize)
    # number of TRs inside the story window (inclusive bounds)
    story_TRs = cfg.story_TR_2 - cfg.story_TR_1 + 1

    #### MAIN PROCESSING ###
    nRuns = len(cfg.runNum)
    for runIndex in np.arange(nRuns):
        # Per-run accumulators; one slot per classification station.
        runData = StructDict()
        runData.cheating_probability = np.zeros((cfg.nStations,))
        runData.zTransferred = np.zeros((cfg.nStations,))
        runData.correct_prob = np.zeros((cfg.nStations,))
        runData.interpretation = getSubjectInterpretation(cfg)
        runData.badVoxels = {}
        runData.dataForClassification = {}
        all_data = np.zeros((cfg.nVox,cfg.nTR_run + 1)) # adding 1 because we're not starting at 0 with the indexing
        runData.story_data = np.zeros((cfg.nVox,story_TRs))
        makeRunHeader(cfg,runIndex)
        run = cfg.runNum[runIndex]
        scanNum = cfg.scanNum[runIndex]
        storyTRCount = 0
        # index of the most recently classified station; stays 0 until the
        # first station completes (so early TRheader lines report station 0)
        stationInd=0
        for TRFilenum in np.arange(cfg.nTR_skip+1,cfg.nTR_run+1):
            # for TRFilenum in np.arange(11,54):
            if TRFilenum == cfg.nTR_skip+1: # wait until run starts
                timeout_file = 180
            else:
                timeout_file = 5
            A = time.time()
            # blocking read of this TR's DICOM, then convert -> register to MNI -> apply ROI mask
            dicomData = readRetryDicomFromFileInterface(fileInterface,
                                                        getDicomFileName(cfg, scanNum, TRFilenum),
                                                        timeout=timeout_file)
            full_nifti_name = convertToNifti(TRFilenum,scanNum,cfg,dicomData)
            registeredFileName = registerNewNiftiToMNI(cfg,full_nifti_name)
            maskedData = apply_mask(registeredFileName,cfg.mask_filename)
            all_data[:,TRFilenum] = maskedData  # indexed by file number, hence the +1 sizing above
            B = time.time()
            print('read to mask time: {:5f}'.format(B-A))
            if TRFilenum >= cfg.fileNum_story_TR_1 and TRFilenum <= cfg.fileNum_story_TR_2: # we're at a story TR now
                runData.story_data[:,storyTRCount] = maskedData
                if np.any(storyTRCount == cfg.last_tr_in_station.astype(int)):
                    # NOW PREPROCESS AND CLASSIFY
                    stationInd = np.argwhere(storyTRCount == cfg.last_tr_in_station.astype(int))[0][0]
                    A = time.time()
                    runData = preprocessAndPredict(cfg,runData,storyTRCount)
                    B = time.time()
                    print('preprocessAndPredict time: {:5f}'.format(B-A))
                    text_to_save = '{0:05f}'.format(runData.correct_prob[stationInd])
                    file_name_to_save = getStationClassoutputFilename(run, stationInd)
                    # in cloud mode the output path lives on the intelrt machine
                    if cfg.mode == 'cloud':
                        full_filename_to_save = os.path.join(cfg.intelrt.subject_full_day_path,file_name_to_save)
                    else:
                        full_filename_to_save = os.path.join(cfg.subject_full_day_path,file_name_to_save)
                    fileInterface.putTextFile(full_filename_to_save,text_to_save)
                    if args.commpipe:
                        # JUST TO PLOT ON WEB SERVER
                        projUtils.sendResultToWeb(projComm, run,
                                                  int(stationInd),
                                                  runData.correct_prob[stationInd])
                storyTRCount += 1
            TRheader = makeTRHeader(cfg,runIndex,TRFilenum,storyTRCount-1,stationInd,runData.correct_prob[stationInd])

        # SAVE OVER RUN NP FILE
        runData.scanNum = scanNum # save scanning number
        runData.subjectName = cfg.subjectName
        runData.dicomDir = cfg.dicomDir
        run_filename = getRunFilename(cfg.sessionId, run)
        full_run_filename_to_save = os.path.join(cfg.subject_full_day_path,run_filename)
        #try:
        sio.savemat(full_run_filename_to_save, runData, appendmat=False)
        # NOTE(review): the disabled handler below references self/msg/blkGrpFilename,
        # none of which exist in this function — it was copied from a class method
        # and cannot simply be re-enabled as-is.
        #except Exception as err:
        #    errorReply = self.createReplyMessage(msg, MsgResult.Errsor)
        #    errorReply.data = "Error: Unable to save blkGrpFile %s: %r" % (blkGrpFilename, err)
        #    return errorReply

        # DELETE ALL FILES IF FLAGGED TO #
        # REPEAT AT THE END OF THE RUN AS WELL
        if args.deleteTmpNifti == '1':
            deleteTmpFiles(cfg)
        else:
            print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
            print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
            print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        # DELETE ALL FILES IF FLAGGED TO #
    sys.exit(0)
def main():
    """Real-time run loop for the amygActivation experiment.

    Connects to the projectInterface over RPC, then for each configured run
    builds the run regressor, streams per-TR DICOMs from the scanner,
    converts and masks each volume, and on every (shifted) happy-face TR
    computes percent signal change relative to the preceding fixation block.
    The per-TR value is written back to the display computer and plotted on
    the web interface.  Calls sys.exit(0) when all runs are done.

    Changes vs. original: the happy-block lookup no longer hard-codes 4
    blocks (uses len(happy_blocks), so a run with a different block count
    no longer raises IndexError); two unused locals were removed.
    """
    logger = logging.getLogger()
    # logLevel / defaultConfig are module-level settings defined elsewhere in the file
    logger.setLevel(logLevel)
    logging.info('amygActivation: first log message!')
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs', '-r', default='', type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans', '-s', default='', type=str,
                           help='Comma separated list of scan number')
    argParser.add_argument('--deleteTmpNifti', '-d', default='1', type=str,
                           help='Set to 0 if rerunning during a single scanning after error')
    args = argParser.parse_args()

    # Initialize the RPC connection to the projectInterface
    # This will give us a dataInterface for retrieving files and
    # a subjectInterface for giving feedback
    clientInterface = ClientInterface()
    dataInterface = clientInterface.dataInterface
    subjInterface = clientInterface.subjInterface
    webInterface = clientInterface.webInterface
    args.dataRemote = dataInterface.isRunningRemote()

    cfg = utils.loadConfigFile(args.config)
    cfg = initialize(cfg, args)

    # DELETE ALL FILES IF FLAGGED (DEFAULT) #
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg, args)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    createTmpFolder(cfg, args)

    #### MAIN PROCESSING ###
    nRuns = len(cfg.runNum)
    for runIndex in np.arange(nRuns):
        # Steps that we have to do:
        # 1. load run regressor X - ** make run regressor that has TRs -
        # 2. find the happy face trials (happy) X
        # 3. find the rest TRs right before each one X
        # At every TR --> register to MNI, mask, etc
        # 4. zscore previous rest data (convert + register like before)
        # 5. calculate percent signal change over ROI
        # 6. save as a text file (Every TR-- display can smooth it)
        runNum = cfg.runNum[runIndex]  # this will be 1-based now!! it will be the actual run number in case it's out of order
        runId = makeRunHeader(cfg, args, runIndex)
        run = cfg.runNum[runIndex]
        # create run folder
        runFolder = createRunFolder(cfg, args, runNum)
        scanNum = cfg.scanNum[runIndex]
        regressor = makeRunReg(cfg, args, dataInterface, runNum, runFolder, saveMat=1)

        # intialize data stream
        dicomScanNamePattern = utils.stringPartialFormat(cfg.dicomNamePattern, 'SCAN', scanNum)
        streamId = dataInterface.initScannerStream(cfg.dicomDir,
                                                   dicomScanNamePattern,
                                                   cfg.minExpectedDicomSize)

        # Condition timing, shifted by the hemodynamic lag (cfg.nTR_shift).
        happy_TRs = findConditionTR(regressor, int(cfg.HAPPY))
        happy_TRs_shifted = happy_TRs + cfg.nTR_shift
        happy_blocks = list(split_tol(happy_TRs_shifted, 1))
        fixation_TRs = findConditionTR(regressor, int(cfg.REST))
        fixation_TRs_shifted = fixation_TRs + cfg.nTR_shift
        fixation_blocks = list(split_tol(fixation_TRs_shifted, 1))

        runData = StructDict()
        runData.all_data = np.zeros((cfg.nVox[cfg.useMask], cfg.nTR_run - cfg.nTR_skip))
        runData.percent_change = np.zeros((cfg.nTR_run - cfg.nTR_skip, ))
        runData.percent_change[:] = np.nan
        runData.badVoxels = np.array([])

        TRindex = 0
        for TRFilenum in np.arange(cfg.nTR_skip + 1, cfg.nTR_run + 1):  # iterate through all TRs
            if TRFilenum == cfg.nTR_skip + 1:  # wait until run starts
                timeout_file = 180
            else:
                timeout_file = 5
            A = time.time()
            dicomFilename = dicomScanNamePattern.format(TR=TRFilenum)
            print(f'Get Dicom: {dicomFilename}')
            dicomData = dataInterface.getImageData(streamId, int(TRFilenum), timeout_file)
            if dicomData is None:
                print('Error: getImageData returned None')
                return
            full_nifti_name = convertToNifti(cfg, args, TRFilenum, scanNum, dicomData)
            print(full_nifti_name)
            print(cfg.MASK_transformed[cfg.useMask])
            maskedData = apply_mask(full_nifti_name, cfg.MASK_transformed[cfg.useMask])
            runData.all_data[:, TRindex] = maskedData
            B = time.time()
            print('read to mask time: {:5f}'.format(B - A))

            if TRindex in happy_TRs_shifted:  # we're at a happy block
                # now take previous fixation block for z scoring
                # (generalized: was np.arange(4), which assumed exactly 4 happy blocks)
                this_block = [b for b in np.arange(len(happy_blocks))
                              if TRindex in happy_blocks[b]][0]
                fixation_this_block = fixation_blocks[this_block]
                avg_activity, runData = getAvgSignal(fixation_this_block, runData, TRindex, cfg)
                runData.percent_change[TRindex] = calculatePercentChange(
                    avg_activity, runData.all_data[:, TRindex])
                text_to_save = '{0:05f}'.format(runData.percent_change[TRindex])
                file_name_to_save = getOutputFilename(run, TRFilenum)  # save as the actual file number, not index
                # now we want to always send this back to the local computer running the display
                full_file_name_to_save = os.path.join(cfg.local.subject_full_day_path,
                                                      runId, file_name_to_save)
                # Send classification result back to the console computer
                try:
                    dataInterface.putFile(full_file_name_to_save, text_to_save)
                except Exception as err:
                    print('Error putFile: ' + str(err))
                    return
                # JUST TO PLOT ON WEB SERVER
                subjInterface.setResult(run, int(TRFilenum), float(runData.percent_change[TRindex]))
                webInterface.plotDataPoint(run, int(TRFilenum), float(runData.percent_change[TRindex]))
            TRheader = makeTRHeader(cfg, runIndex, TRFilenum, TRindex, runData.percent_change[TRindex])
            TRindex += 1

        # SAVE OVER RUN
        runData.scanNum = scanNum  # save scanning number
        runData.subjectName = cfg.subjectName
        runData.dicomDir = cfg.dicomDir
        run_filename = getRunFilename(cfg.sessionId, run)
        full_run_filename_to_save = os.path.join(runFolder, run_filename)
        sio.savemat(full_run_filename_to_save, runData, appendmat=False)
    sys.exit(0)
def main():
    """Real-time run loop for the face-matching experiment.

    Parses command-line options, initializes the project comm pipe and file
    interface, then for each configured run loads the condition regressor,
    watches for per-TR DICOM files, converts/registers/masks each volume,
    and on every (shifted) happy-face TR computes percent signal change
    relative to all earlier non-happy TRs, writing the value to a text file
    (and optionally to the web plot).  Calls sys.exit(0) when done.
    """
    logger = logging.getLogger()
    # logLevel / defaultConfig are module-level settings defined elsewhere in the file
    logger.setLevel(logLevel)
    logging.info('Face matching: first log message!')
    argParser = argparse.ArgumentParser()
    argParser.add_argument('--config', '-c', default=defaultConfig, type=str,
                           help='experiment config file (.json or .toml)')
    argParser.add_argument('--runs', '-r', default='', type=str,
                           help='Comma separated list of run numbers')
    argParser.add_argument('--scans', '-s', default='', type=str,
                           help='Comma separated list of scan number')
    argParser.add_argument('--deleteTmpNifti', '-d', default='1', type=str,
                           help='Set to 0 if rerunning during a single scanning after error')
    # creates pipe communication link to send/request responses through web pipe
    argParser.add_argument('--commpipe', '-q', default=None, type=str,
                           help='Named pipe to communicate with projectInterface')
    argParser.add_argument('--filesremote', '-x', default=False, action='store_true',
                           help='dicom files retrieved from remote server')
    args = argParser.parse_args()
    print(args)
    cfg = initializeFaceMatching(args.config, args)

    # DELETE ALL FILES IF FLAGGED TO #
    if args.deleteTmpNifti == '1':
        deleteTmpFiles(cfg)
    else:
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        print('NOT DELETING NIFTIS IN tmp/convertedNiftis')
        print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # DELETE ALL FILES IF FLAGGED TO #

    # comm pipe
    projComm = projUtils.initProjectComm(args.commpipe, args.filesremote)
    fileInterface = FileInterface(filesremote=args.filesremote, commPipes=projComm)
    # intialize watching in particular directory
    fileInterface.initWatch(cfg.dicomDir, cfg.dicomNamePattern, cfg.minExpectedDicomSize)

    #### MAIN PROCESSING ###
    nRuns = len(cfg.runNum)
    for runIndex in np.arange(nRuns):
        # Steps that we have to do:
        # 1. load run regressor X
        # 2. find the emotional face trials (happy) X
        # 3. find the rest TRs right before each one X
        # At every TR --> register to MNI, mask, etc
        # 4. zscore previous rest data (convert + register like before)
        # 5. calculate percent signal change over ROI
        # 6. save as a text file (Every TR-- display can smooth it)

        # LOAD RUN REGRESSOR
        runNum = runIndex + 1
        regressor = getRegressorMatrix(cfg, runNum)
        happy_TRs = findConditionTR(regressor, int(cfg.HAPPY)) # 3 blocks 12 TRs each
        happy_TRs_shifted = happy_TRs + cfg.nTR_shift
        happy_TRs_shifted_filenum = happy_TRs_shifted + cfg.nTR_skip # to account for first 2 files that we're skipping
        neutral_TRs = findConditionTR(regressor, int(cfg.NEUTRAL))
        neutral_TRs_shifted = neutral_TRs + cfg.nTR_shift
        object_TRs = findConditionTR(regressor, int(cfg.OBJECT))
        object_TRs_shifted = object_TRs + cfg.nTR_shift
        # NOTE(review): nBlocks / nTR_per_block / happy_TRs_shifted_filenum /
        # fixation_TRs_shifted appear unused below — candidates for cleanup.
        nBlocks = np.shape(happy_TRs)[0]
        nTR_per_block = np.shape(happy_TRs)[1]
        fixation_TRs, fixation_blocks = findFixationTR(regressor)
        fixation_TRs_shifted = fixation_TRs + cfg.nTR_shift
        fixation_blocks_shifted = fixation_blocks + cfg.nTR_shift
        # every non-happy (shifted) TR, flattened into one pool for z-scoring
        all_other_categories_shifted = np.concatenate(
            (neutral_TRs_shifted, object_TRs_shifted, fixation_blocks_shifted),
            axis=0).flatten()

        runData = StructDict()
        runData.all_data = np.zeros((cfg.nVox, cfg.nTR_run - cfg.nTR_skip))
        runData.percent_change = np.zeros((cfg.nTR_run - cfg.nTR_skip, ))
        runData.percent_change[:] = np.nan
        runData.badVoxels = np.array([])

        makeRunHeader(cfg, runIndex)
        run = cfg.runNum[runIndex]
        scanNum = cfg.scanNum[runIndex]
        TRindex = 0
        for TRFilenum in np.arange(cfg.nTR_skip + 1, cfg.nTR_run + 1):  # iterate through all TRs
            if TRFilenum == cfg.nTR_skip + 1:  # wait until run starts
                timeout_file = 180
            else:
                timeout_file = 5
            A = time.time()
            # blocking read of this TR's DICOM, then convert -> register to MNI -> apply ROI mask
            dicomData = readRetryDicomFromFileInterface(fileInterface,
                                                        getDicomFileName(cfg, scanNum, TRFilenum),
                                                        timeout=timeout_file)
            full_nifti_name = convertToNifti(TRFilenum, scanNum, cfg, dicomData)
            registeredFileName = registerNewNiftiToMNI(cfg, full_nifti_name)
            maskedData = apply_mask(registeredFileName, cfg.mask_filename)
            runData.all_data[:, TRindex] = maskedData
            B = time.time()
            print('read to mask time: {:5f}'.format(B - A))

            if TRindex in happy_TRs_shifted:  # we're at a happy block
                # now get TRs to use for zscoring
                TRs_to_use_other_categories = np.sort(
                    all_other_categories_shifted[all_other_categories_shifted < TRindex])
                avg_activity, runData = getAvgSignal(TRs_to_use_other_categories, runData, TRindex, cfg)
                runData.percent_change[TRindex] = calculatePercentChange(
                    avg_activity, runData.all_data[:, TRindex])
                text_to_save = '{0:05f}'.format(runData.percent_change[TRindex])
                # NOTE(review): filename keyed by TRindex here, whereas the
                # amygActivation script uses the actual file number — confirm intended.
                file_name_to_save = getOutputFilename(run, TRindex)
                # in cloud mode the output path lives on the intelrt machine
                if cfg.mode == 'cloud':
                    full_filename_to_save = os.path.join(cfg.intelrt.subject_full_day_path, file_name_to_save)
                else:
                    full_filename_to_save = os.path.join(cfg.subject_full_day_path, file_name_to_save)
                fileInterface.putTextFile(full_filename_to_save, text_to_save)
                if args.commpipe:
                    # JUST TO PLOT ON WEB SERVER
                    projUtils.sendResultToWeb(projComm, run, int(TRindex),
                                              runData.percent_change[TRindex])
            TRheader = makeTRHeader(cfg, runIndex, TRFilenum, TRindex, runData.percent_change[TRindex])
            TRindex += 1

        # SAVE OVER RUN NP FILE
        runData.scanNum = scanNum  # save scanning number
        runData.subjectName = cfg.subjectName
        runData.dicomDir = cfg.dicomDir
        run_filename = getRunFilename(cfg.sessionId, run)
        full_run_filename_to_save = os.path.join(cfg.subject_full_day_path, run_filename)
        #try:
        sio.savemat(full_run_filename_to_save, runData, appendmat=False)
        # NOTE(review): the disabled handler below references self/msg/blkGrpFilename,
        # none of which exist in this function — it was copied from a class method
        # and cannot simply be re-enabled as-is.
        #except Exception as err:
        #    errorReply = self.createReplyMessage(msg, MsgResult.Errsor)
        #    errorReply.data = "Error: Unable to save blkGrpFile %s: %r" % (blkGrpFilename, err)
        #    return errorReply
    sys.exit(0)