Example #1
import numpy as np

# StructDict and the greenEyes helpers used below (initializeGreenEyes,
# getSubjectInterpretation, convertToNifti, registerNewNiftiToMNI, apply_mask,
# preprocessAndPredict) are assumed to be importable from the surrounding project code.


def main():
    configFile = 'greenEyes.toml'
    cfg = initializeGreenEyes(configFile)
    runData = StructDict()
    runData.cheating_probability = np.zeros((cfg.nStations, ))
    runData.correct_prob = np.zeros((cfg.nStations, ))
    runData.interpretation = getSubjectInterpretation(cfg)
    runData.badVoxels = {}
    runData.dataForClassification = {}
    story_TRs = cfg.story_TR_2 - cfg.story_TR_1
    SKIP = 10
    all_data = np.zeros((cfg.nVox, cfg.nTR_run - SKIP))  # don't need to save
    runData.story_data = np.zeros((cfg.nVox, story_TRs))
    #### MAIN PROCESSING ###
    ## FUNCTION TO OPERATE OVER ALL SCANNING RUNS
    scanNum = 9
    for TRindex in np.arange(cfg.nTR_run - SKIP):
        print('TRindex = {}'.format(TRindex))
        TRnum = TRindex + 1 + SKIP  # actual file number to look for
        TRindex_story = TRindex - cfg.story_TR_1
        full_nifti_name = convertToNifti(TRnum, scanNum, cfg)
        registeredFileName = registerNewNiftiToMNI(cfg, full_nifti_name)
        maskedData = apply_mask(registeredFileName, cfg.MASK)
        all_data[:, TRindex] = maskedData
        if TRindex_story >= 0:  # we're at a story TR now
            runData.story_data[:, TRindex_story] = maskedData
            if np.any(TRindex_story == cfg.last_tr_in_station.astype(int)):
                # NOW PREPROCESS AND CLASSIFY
                runData = preprocessAndPredict(cfg, runData, TRindex_story)
        else:
            pass
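
Both examples store per-run state in a StructDict and read or write its fields with attribute syntax (runData.correct_prob, cfg.nVox, and so on). The real class ships with the project's utility code; the minimal sketch below only assumes the dict-plus-attribute-access behaviour the snippets actually use.

class StructDict(dict):
    """Minimal dict with attribute-style access (sketch only)."""

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value


# Usage mirroring the examples above:
# runData = StructDict()
# runData.badVoxels = {}          # attribute-style write
# runData['badVoxels']            # plain dict access still works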
Example #2
import argparse

import numpy as np

# StructDict, FileInterface, and the greenEyes helpers used below (initializeGreenEyes,
# getSubjectInterpretation, makeRunHeader, getDicomFileName, convertToNifti,
# registerNewNiftiToMNI, apply_mask, preprocessAndPredict, getStationClassoutputFilename)
# are assumed to be importable from the surrounding project code.


def main():
	
	# MAKES STRUCT WITH ALL PARAMETERS IN IT
	argParser = argparse.ArgumentParser()
	argParser.add_argument('--config', '-c', default='greenEyes_organized.toml', type=str,
	                   help='experiment config file (.json or .toml)')
	argParser.add_argument('--runs', '-r', default=None, type=str,
	                   help='Comma separated list of run numbers')
	argParser.add_argument('--scans', '-s', default=None, type=str,
	                   help='Comma separated list of scan numbers')
	# creates web pipe communication link to send/request responses through web pipe
	argParser.add_argument('--webpipe', '-w', default=None, type=str,
	                   help='Named pipe to communicate with webServer')
	argParser.add_argument('--webfilesremote', '-x', default=False, action='store_true',
	                   help='dicom files retrieved from remote server')
	args = argParser.parse_args()
	params = StructDict({'config': args.config,'runs': args.runs, 'scans': args.scans,
	                 'webpipe': args.webpipe, 'webfilesremote': args.webfilesremote})
	cfg = initializeGreenEyes(params.config,params)

	# initialize file interface class -- for now only local
	fileInterface = FileInterface()
	# initialize watching in a particular directory
	fileInterface.initWatch(cfg.intelrt.imgDir, cfg.intelrt.dicomNamePattern, cfg.minExpectedDicomSize)
	runData = StructDict()
	runData.cheating_probability = np.zeros((cfg.nStations,))
	runData.correct_prob = np.zeros((cfg.nStations,))
	runData.interpretation = getSubjectInterpretation(cfg)
	runData.badVoxels = {}
	runData.dataForClassification = {}
	story_TRs = cfg.story_TR_2 - cfg.story_TR_1
	SKIP = 10
	all_data = np.zeros((cfg.nVox,cfg.nTR_run)) # don't need to save
	runData.story_data = np.zeros((cfg.nVox,story_TRs))
	#### MAIN PROCESSING ###
	## FUNCTION TO OPERATE OVER ALL SCANNING RUNS
	# LOOP OVER ALL CFG.SCANNUMS
	nRuns = len(cfg.runs)
	for runIndex in np.arange(nRuns):
		
		header = makeRunHeader(cfg, runIndex)
		print(header)
		run = cfg.runs[runIndex]
		scanNum = cfg.scanNums[runIndex]

		storyTRCount = 0
		for TRFilenum in np.arange(SKIP+1,cfg.nTR_run+1):
			##### GET DATA BUFFER FROM LOCAL MACHINE ###
			# if the filename starts with a slash it is treated as a full path;
			# otherwise it is resolved relative to the watch directory
			dicomData = fileInterface.watchfile(getDicomFileName(cfg, scanNum, TRFilenum), timeout=5)
			full_nifti_name = convertToNifti(TRFilenum,scanNum,cfg)
			registeredFileName = registerNewNiftiToMNI(cfg,full_nifti_name)
			maskedData = apply_mask(registeredFileName,cfg.mask_filename)
			all_data[:, TRFilenum - 1] = maskedData  # TRFilenum is 1-based; columns are 0-based
			if cfg.fileNum_story_TR_1 <= TRFilenum <= cfg.fileNum_story_TR_2:  # we're at a story TR now
				runData.story_data[:,storyTRCount] = maskedData
				if np.any(storyTRCount == cfg.last_tr_in_station.astype(int)):
					# NOW PREPROCESS AND CLASSIFY
					runData = preprocessAndPredict(cfg,runData,storyTRCount)
					# assumed lookup: index of the station whose last TR is this story TR
					stationInd = np.argwhere(cfg.last_tr_in_station.astype(int) == storyTRCount)[0][0]
					text_to_save = '{0:05f}'.format(runData.correct_prob[stationInd])
					file_name_to_save = getStationClassoutputFilename(cfg.sessionId, run, stationInd)
					full_filename_to_save = cfg.intelrt.subject_full_day_path + file_name_to_save
					fileInterface.putTextFile(full_filename_to_save, text_to_save)
				storyTRCount += 1
			else:
				pass
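
The --runs and --scans arguments arrive as comma-separated strings, while the main loop indexes cfg.runs and cfg.scanNums as lists of integers, so initializeGreenEyes presumably performs that conversion. Below is a minimal sketch of such a parser, where parseCommaSeparatedInts is a hypothetical helper rather than part of the project.

def parseCommaSeparatedInts(value):
    """Convert a string like '1,2,3' into [1, 2, 3]; pass None through unchanged."""
    if value is None:
        return None
    return [int(item) for item in value.split(',')]


# e.g. '--runs 1,2 --scans 5,6' would give cfg.runs = [1, 2] and cfg.scanNums = [5, 6]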