def webPipeCallback(request):
    if 'cmd' not in request:
        raise StateError('handleFifoRequests: cmd field not in request: {}'.format(request))
    cmd = request['cmd']
    route = request.get('route')
    response = StructDict({'status': 200})
    if route == 'dataserver':
        try:
            response = RtAttenWeb.webServer.sendDataMsgFromThread(request, timeout=10)
            if response is None:
                raise StateError('handleFifoRequests: Response None from sendDataMessage')
            if 'status' not in response:
                raise StateError('handleFifoRequests: status field missing from response: {}'.format(response))
            if response['status'] not in (200, 408):
                if 'error' not in response:
                    raise StateError('handleFifoRequests: error field missing from response: {}'.format(response))
                RtAttenWeb.webServer.setUserError(response['error'])
                logging.error('handleFifo status {}: {}'.format(response['status'], response['error']))
        except Exception as err:
            errStr = 'SendDataMessage Exception type {}: error {}:'.format(type(err), str(err))
            response = {'status': 400, 'error': errStr}
            RtAttenWeb.webServer.setUserError(errStr)
            logging.error('handleFifo Exception: {}'.format(errStr))
            raise err
    else:
        if cmd == 'webCommonDir':
            response.filename = CommonOutputDir
        elif cmd == 'classificationResult':
            try:
                predict = request['value']
                runId = request.get('runId')
                # predict has {'catsep': val, 'vol': val}
                catsep = predict.get('catsep')
                vol = predict.get('vol')
                # Test for NaN by comparing the value to itself; if not equal then it is NaN
                if catsep is not None and catsep == catsep:
                    image_b64Str = RtAttenWeb.createFeedbackImage(vol, catsep)
                    cmd = {'cmd': 'subjectDisplay', 'data': image_b64Str}
                    RtAttenWeb.webServer.sendSubjMsgFromThread(json.dumps(cmd))
                    # also update the clinician window
                    # change classification value range to 0..1 instead of -1..1
                    # classVal = (catsep + 1) / 2
                    cmd = {'cmd': 'classificationResult', 'classVal': catsep, 'vol': vol, 'runId': runId}
                    RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(cmd))
            except Exception as err:
                errStr = 'SendClassification Exception type {}: error {}:'.format(type(err), str(err))
                response = {'status': 400, 'error': errStr}
                RtAttenWeb.webServer.setUserError(errStr)
                logging.error('handleFifo Exception: {}'.format(errStr))
                raise err
        elif cmd == 'subjectDisplay':
            # forward to subject window
            color = request.get('bgcolor')
            if color is not None:
                image_b64Str = RtAttenWeb.createSolidColorImage(color)
                cmd = {'cmd': 'subjectDisplay', 'data': image_b64Str}
                RtAttenWeb.webServer.sendSubjMsgFromThread(json.dumps(cmd))
            else:
                RtAttenWeb.webServer.sendSubjMsgFromThread(request)
    return response

def clientWebpipeCmd(webpipes, cmd):
    '''Send a web request using named pipes to the web server for handling.
    This allows a separate client process to make requests of the web server process.
    It writes the request on fd_out and receives the reply on fd_in.
    '''
    webpipes.fd_out.write(json.dumps(cmd) + os.linesep)
    msg = webpipes.fd_in.readline()
    if len(msg) == 0:
        # fifo closed
        raise StateError('WebPipe closed')
    response = json.loads(msg)
    retVals = StructDict()
    decodedData = None
    if 'status' not in response:
        raise StateError('clientWebpipeCmd: status not in response: {}'.format(response))
    retVals.statusCode = response['status']
    if retVals.statusCode == 200:  # success
        if 'filename' in response:
            retVals.filename = response['filename']
        if 'data' in response:
            decodedData = b64decode(response['data'])
            if retVals.filename is None:
                raise StateError('clientWebpipeCmd: filename field is None')
            retVals.data = formatFileData(retVals.filename, decodedData)
    elif retVals.statusCode not in (200, 408):
        raise RequestError('WebRequest error: status {}: {}'.format(
            retVals.statusCode, response['error']))
    return retVals

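# Illustrative sketch (not from the original source): how a client process might
# drive clientWebpipeCmd. It assumes a pre-opened webpipes StructDict carrying
# fd_in/fd_out handles and uses watchFileReqStruct (referenced elsewhere in this
# codebase) to build the request; the dicom path is hypothetical.
def _exampleClientWebpipeUsage(webpipes):
    watchCmd = watchFileReqStruct('/tmp/dicoms/001_000001_000001.dcm')
    retVals = clientWebpipeCmd(webpipes, watchCmd)
    if retVals.statusCode == 200:
        # on success retVals.data holds the decoded, formatted file contents
        return retVals.data
    return None
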
def webSubjCallback(client, message):
    if RtAttenWeb.initialized is not True:
        raise StateError('webSubjCallback: RtAttenWeb not initialized')
    request = json.loads(message)
    cmd = request['cmd']
    logging.log(DebugLevels.L3, "WEB SUBJ CMD: %s", cmd)
    print('Subject Callback: {}'.format(cmd))

def parseDicomVolume(dicomImg, sliceDim):
    '''The raw dicom file will be a 2D picture with multiple slices tiled together.
    We need to separate the slices and form a volume from them.
    '''
    sliceWidth = sliceDim
    sliceHeight = sliceDim
    image = dicomImg.pixel_array
    dicomHeight, dicomWidth = image.shape
    numSlicesPerRow = dicomWidth // sliceWidth
    numSlicesPerCol = dicomHeight // sliceHeight
    max_slices = numSlicesPerRow * numSlicesPerCol
    volume = np.full((sliceWidth, sliceHeight, max_slices), np.nan)
    sliceNum = 0
    for row in range(numSlicesPerCol):
        for col in range(numSlicesPerRow):
            if sliceNum >= max_slices:
                raise StateError('parseDicomVolume: sliceNum {} exceeds max_slices {}'
                                 .format(sliceNum, max_slices))
            rpos = row * sliceHeight
            cpos = col * sliceWidth
            slice = image[rpos:rpos + sliceHeight, cpos:cpos + sliceWidth]
            volume[:, :, sliceNum] = slice
            sliceNum += 1
    return volume

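# Illustrative sketch (assumption, not from the original source): reading a
# mosaic-format dicom with pydicom and un-tiling it into a volume. The
# sliceDim value (64) and the file path are hypothetical; readDicomFromFile
# elsewhere in this codebase wraps a similar pydicom read.
def _exampleParseDicomVolume():
    import pydicom
    dicomImg = pydicom.dcmread('/tmp/dicoms/001_000001_000001.dcm')
    # e.g. a 512x512 mosaic of 64x64 slices yields a (64, 64, 64) volume
    volume = parseDicomVolume(dicomImg, sliceDim=64)
    return volume.shape
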
def validateRequestedFile(dir, file, textFileTypeOnly=False):
    # Restrict requests to certain directories and file types
    if WebSocketFileWatcher.allowedDirs is None or WebSocketFileWatcher.allowedTypes is None:
        raise StateError('Allowed Directories or File Types is not set')
    if file is not None and file != '':
        fileDir, filename = os.path.split(file)
        fileExtension = Path(filename).suffix
        if textFileTypeOnly:
            if fileExtension != '.txt':
                return False
        elif fileExtension not in WebSocketFileWatcher.allowedTypes:
            return False
        if fileDir is not None and fileDir != '':  # and os.path.isabs(fileDir):
            dirMatch = False
            for allowedDir in WebSocketFileWatcher.allowedDirs:
                if fileDir.startswith(allowedDir):
                    dirMatch = True
                    break
            if dirMatch is False:
                return False
    if dir is not None and dir != '':
        for allowedDir in WebSocketFileWatcher.allowedDirs:
            if dir.startswith(allowedDir):
                return True
        return False
    # default case
    return True

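# Illustrative sketch (assumption): how the allowedDirs/allowedTypes class
# attributes gate requests. The directory and extension values below are
# hypothetical examples, not the project's actual configuration.
def _exampleValidateRequestedFile():
    WebSocketFileWatcher.allowedDirs = ['/data/subjects', '/tmp/dicoms']
    WebSocketFileWatcher.allowedTypes = ['.dcm', '.mat', '.txt']
    assert validateRequestedFile('/data/subjects/s1', None) is True
    assert validateRequestedFile(None, '/tmp/dicoms/scan/001.dcm') is True
    assert validateRequestedFile(None, '/etc/secret.dcm') is False        # directory not allowed
    assert validateRequestedFile(None, '/tmp/dicoms/notes.csv') is False  # file type not allowed
    assert validateRequestedFile(None, '/tmp/dicoms/log.dcm', textFileTypeOnly=True) is False
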
def sendDataMsgFromThread(msg, timeout=None):
    if Web.wsDataConn is None:
        raise StateError("WebServer: No Data Websocket Connection")
    callbackStruct = StructDict()
    callbackStruct.event = threading.Event()
    # schedule the call with the io thread
    Web.ioLoopInst.add_callback(Web.sendDataMessage, msg, callbackStruct)
    # wait for completion of the call
    callbackStruct.event.wait(timeout)
    if callbackStruct.event.is_set() is False:
        raise TimeoutError("sendDataMessage: Data Request Timed Out({}) {}".format(timeout, msg))
    if callbackStruct.response is None:
        raise StateError('sendDataMessage: callbackStruct.response is None for command {}'.format(msg))
    if callbackStruct.status == 200 and 'writefile' in callbackStruct.response:
        writeResponseDataToFile(callbackStruct.response)
    return callbackStruct.response

def waitForFile(self, specificFileName, timeout=0):
    fileExists = os.path.exists(specificFileName)
    if not fileExists:
        if self.observer is None:
            raise FileNotFoundError("No fileNotifier and dicom file not found %s" % (specificFileName))
        else:
            logStr = "FileWatcher: Waiting for file {}, timeout {}s ".format(specificFileName, timeout)
            logging.log(DebugLevels.L6, logStr)
    eventLoopCount = 0
    exitWithFileEvent = False
    eventTimeStamp = 0
    startTime = time.time()
    timeToCheckForFile = time.time() + 1  # check if file exists at least every second
    while not fileExists:
        if timeout > 0 and time.time() > (startTime + timeout):
            return None
        # look for file creation event
        eventLoopCount += 1
        try:
            event, ts = self.fileNotifyQ.get(block=True, timeout=1.0)
        except Empty as err:
            # The timeout occurred on fileNotifyQ.get()
            fileExists = os.path.exists(specificFileName)
            continue
        if event is None:
            raise StateError('waitForFile: event is None')
        # We may have a stale event from a previous file if multiple events
        # are created per file or if the previous file eventloop
        # timed out and then the event arrived later.
        if event.src_path == specificFileName:
            fileExists = True
            exitWithFileEvent = True
            eventTimeStamp = ts
            continue
        if time.time() > timeToCheckForFile:
            # periodically check if the file exists; can occur if we get
            # swamped with unrelated events
            fileExists = os.path.exists(specificFileName)
            timeToCheckForFile = time.time() + 1
    # wait for the full file to be written, wait at most 300 ms
    waitIncrement = 0.1
    totalWriteWait = 0.0
    fileSize = os.path.getsize(specificFileName)
    while fileSize < self.minFileSize and totalWriteWait < 0.3:
        time.sleep(waitIncrement)
        totalWriteWait += waitIncrement
        fileSize = os.path.getsize(specificFileName)
    logging.log(DebugLevels.L6,
                "File avail: eventLoopCount %d, writeWaitTime %.3f, "
                "fileEventCaptured %s, fileName %s, eventTimeStamp %.5f",
                eventLoopCount, totalWriteWait, exitWithFileEvent,
                specificFileName, eventTimeStamp)
    if self.demoStep is not None and self.demoStep > 0:
        self.prevEventTime = demoDelay(self.demoStep, self.prevEventTime)
    return specificFileName

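# Illustrative sketch (assumption): a polling loop that uses waitForFile to
# block until each successive dicom arrives. The fileWatcher instance, scan
# directory, and filename pattern are hypothetical (the pattern mirrors the
# "001_{scanNum}_{fileNum}" format used by uploadImages below).
def _exampleWaitForFiles(fileWatcher, scanDir, numTRs):
    found = []
    for trNum in range(1, numTRs + 1):
        dicomName = os.path.join(scanDir, "001_000001_{:06d}.dcm".format(trNum))
        # waitForFile returns None if the timeout elapses before the file shows up
        result = fileWatcher.waitForFile(dicomName, timeout=5)
        if result is None:
            break
        found.append(result)
    return found
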
def eventCallback(client, message):
    if RtAttenWeb.initialized is not True:
        raise StateError('eventCallback: RtAttenWeb not initialized')
    request = json.loads(message)
    cmd = request['cmd']
    logging.log(DebugLevels.L3, "WEB EVENT CMD: %s", cmd)
    if cmd == 'ttlPulse':
        # forward the ttlPulse to the subject window
        cmd = {'cmd': 'ttlPulse'}
        RtAttenWeb.webServer.sendSubjMsgFromThread(json.dumps(cmd))

def createFeedbackImage(vol, catsep):
    if not os.path.exists(RtAttenWeb.sceneImageDir) or \
            not os.path.exists(RtAttenWeb.faceNeutralImageDir) or \
            not os.path.exists(RtAttenWeb.faceNegativeImageDir):
        raise InvocationError('Directory for FACE or SCENE missing: {} {} {}'.format(
            RtAttenWeb.sceneImageDir, RtAttenWeb.faceNeutralImageDir, RtAttenWeb.faceNegativeImageDir))
    if RtAttenWeb.numSceneFiles == 0 or \
            RtAttenWeb.numFaceNeutralFiles == 0 or \
            RtAttenWeb.numFaceNegativeFiles == 0:
        raise StateError('Image Face/Scene directory missing jpg files')
    alpha = 0.5
    sceneImagesDir = RtAttenWeb.sceneImageDir
    numSceneFiles = RtAttenWeb.numSceneFiles
    if vol == 'train':
        # for training, set the image as 60% face, 40% scene
        alpha = 0.4
        faceImagesDir = RtAttenWeb.faceNeutralImageDir
        numFaceFiles = RtAttenWeb.numFaceNeutralFiles
    else:
        # calculate the biased alpha value
        gain = 2.3
        x_shift = 0.2
        y_shift = 0.12
        steepness = 0.9
        alpha = steepness / (1 + math.exp(-gain * (catsep - x_shift))) + y_shift
        faceImagesDir = RtAttenWeb.faceNegativeImageDir
        numFaceFiles = RtAttenWeb.numFaceNegativeFiles
    # Choose random numbers for which images to use
    faceRndNum = random.randint(1, numFaceFiles)
    sceneRndNum = random.randint(1, numSceneFiles)
    faceFilename = os.path.join(faceImagesDir, '{}.jpg'.format(faceRndNum))
    sceneFilename = os.path.join(sceneImagesDir, '{}.jpg'.format(sceneRndNum))
    faceImg = Image.open(faceFilename)
    sceneImg = Image.open(sceneFilename)
    # Blend the images together using the classification result value.
    # When alpha is closer to 0 the 1st image param (face) will be more visible,
    # and conversely when alpha is closer to 1 the 2nd image param (scene) will be more visible.
    blendImg = Image.blend(faceImg, sceneImg, alpha=alpha)
    # add a circle in the center
    width, height = blendImg.size
    x_center = width / 2
    y_center = height / 2
    radius = 3
    draw = ImageDraw.Draw(blendImg)
    draw.ellipse((x_center - radius, y_center - radius, x_center + radius, y_center + radius),
                 fill="black", outline="black")
    # convert it to jpeg format and get the bytes to send
    jpgBuf = io.BytesIO()
    blendImg.save(jpgBuf, format='jpeg')
    jpgBytes = jpgBuf.getvalue()
    b64Data = b64encode(jpgBytes)
    b64StrData = b64Data.decode('utf-8')
    return b64StrData

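# Worked example (values approximate): the biased alpha mapping used above,
#   alpha = 0.9 / (1 + exp(-2.3 * (catsep - 0.2))) + 0.12
# gives roughly
#   catsep = -1.0  ->  alpha ~= 0.17   (face image dominates the blend)
#   catsep =  0.0  ->  alpha ~= 0.47   (roughly even blend)
#   catsep = +1.0  ->  alpha ~= 0.90   (scene image dominates the blend)
# so stronger scene-like classification makes the scene progressively clearer.
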
def initFileNotifier(self, dir, filePattern, minFileSize, demoStep=0):
    # inotify doesn't use file patterns
    self.demoStep = demoStep
    self.minFileSize = minFileSize
    if dir is None:
        raise StateError('initFileNotifier: dir is None')
    if not os.path.exists(dir):
        raise NotADirectoryError("No such directory: %s" % (dir))
    if dir != self.watchDir:
        if self.watchDir is not None:
            self.notifier.remove_watch(self.watchDir)
        self.watchDir = dir
        self.notifier.add_watch(self.watchDir, mask=inotify.constants.IN_CLOSE_WRITE)

def loadImageData(self, filename):
    fileExtension = Path(filename).suffix
    if fileExtension == '.mat':
        data = utils.loadMatFile(filename)
    else:
        # Dicom file:
        if fileExtension != '.dcm':
            raise StateError('loadImageData: fileExtension not .dcm: {}'.format(fileExtension))
        data = readDicomFromFile(filename)
        # Check that pixel data can be read; will throw an exception if not
        _ = data.pixel_array
    return data

def writeResponseDataToFile(response):
    '''For responses that have writefile set, write the data to a file'''
    global CommonOutputDir
    if response['status'] != 200:
        raise StateError('writeResponseDataToFile: status not 200')
    if 'writefile' in response and response['writefile'] is True:
        # write the returned data out to a file
        if 'data' not in response:
            raise StateError('writeResponseDataToFile: data field not in response: {}'.format(response))
        if 'filename' not in response:
            del response['data']
            raise StateError('writeResponseDataToFile: filename field not in response: {}'.format(response))
        filename = response['filename']
        decodedData = b64decode(response['data'])
        # prepend with the common output path and write out the file
        # note: can't just use os.path.join() because if two or more elements
        # have an absolute path it discards the earlier elements
        outputFilename = os.path.normpath(CommonOutputDir + filename)
        dirName = os.path.dirname(outputFilename)
        if not os.path.exists(dirName):
            os.makedirs(dirName)
        writeFile(outputFilename, decodedData)
        response['filename'] = outputFilename
        del response['data']

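# Worked example (illustrative, with a hypothetical CommonOutputDir): why the
# code concatenates and normalizes rather than using os.path.join. With an
# absolute filename, os.path.join discards the earlier component:
#   os.path.join('/output/common', '/data/subj1/file.mat')
#       -> '/data/subj1/file.mat'
#   os.path.normpath('/output/common' + '/data/subj1/file.mat')
#       -> '/output/common/data/subj1/file.mat'
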
def close():
    # Currently this should never be called
    raise StateError("Web close() called")
    Web.threadLock.acquire()
    try:
        if Web.wsDataConn is not None:
            Web.wsDataConn.close()
            Web.wsDataConn = None
        for client in Web.wsUserConns[:]:
            client.close()
        Web.wsUserConns = []
        for client in Web.wsSubjConns[:]:
            client.close()
        Web.wsSubjConns = []
    finally:
        Web.threadLock.release()

def sendDataMessage(cmd, callbackStruct):
    if callbackStruct is None or callbackStruct.event is None:
        raise StateError("sendDataMessage: No threading.event attribute in callbackStruct")
    Web.threadLock.acquire()
    try:
        Web.dataSequenceNum += 1
        seqNum = Web.dataSequenceNum
        cmd['seqNum'] = seqNum
        msg = json.dumps(cmd)
        callbackStruct.seqNum = seqNum
        callbackStruct.timeStamp = time.time()
        callbackStruct.status = 0
        callbackStruct.error = None
        Web.dataCallbacks[seqNum] = callbackStruct
        Web.wsDataConn.write_message(msg)
    except Exception as err:
        errStr = 'sendDataMessage error: type {}: {}'.format(type(err), str(err))
        raise RTError(errStr)
    finally:
        Web.threadLock.release()

def getNextTRData(self, run, fileNum):
    specificFileName = self.getDicomFileName(run.scanNum, fileNum)
    data = None
    if self.printFirstFilename:
        print("Loading first file: {}".format(specificFileName))
        self.printFirstFilename = False
    if self.webUseRemoteFiles:
        statusCode = 408
        # loop while a filewatch timeout (408) occurs
        while statusCode == 408:
            watchCmd = wcutils.watchFileReqStruct(specificFileName)
            retVals = wcutils.clientWebpipeCmd(self.webpipes, watchCmd)
            statusCode = retVals.statusCode
            data = retVals.data
        if statusCode != 200:
            raise StateError('getNextTRData: statusCode not 200: {}'.format(statusCode))
    else:
        self.fileWatcher.waitForFile(specificFileName)
        # Load the file, retrying if necessary for up to 500 ms
        retries = 0
        while retries < 5:
            retries += 1
            try:
                data = self.loadImageData(specificFileName)
                # successful
                break
            except Exception as err:
                logging.warning("LoadImage error, retry in 100 ms: {} ".format(err))
                time.sleep(0.1)
    if data is None:
        return None
    fileExtension = Path(specificFileName).suffix
    if fileExtension == '.mat':
        trVol = data.vol
    elif fileExtension == '.dcm':
        trVol = parseDicomVolume(data, self.cfg.session.sliceDim)
    else:
        raise ValidationError('Only filenames of type .mat or .dcm supported')
    return trVol

def uploadImages(request):
    if 'cmd' not in request or request['cmd'] != "uploadImages":
        raise StateError('uploadImages: incorrect cmd request: {}'.format(request))
    if RtAttenWeb.webServer.wsDataConn is None:
        # A remote fileWatcher hasn't connected yet
        errStr = 'Waiting for fileWatcher to attach, please try again momentarily'
        RtAttenWeb.webServer.setUserError(errStr)
        return
    try:
        scanFolder = request['scanFolder']
        scanNum = int(request['scanNum'])
        numDicoms = int(request['numDicoms'])
        uploadType = request['type']
    except KeyError as err:
        RtAttenWeb.webServer.setUserError("Upload request missing a parameter: {}".format(err))
        return
    fileType = Path(RtAttenWeb.cfg.session.dicomNamePattern).suffix
    dicomsInProgressInterval = numDicoms / 4
    intervalCount = 1
    # send periodic progress reports to the front-end
    response = {'cmd': 'uploadProgress', 'type': uploadType, 'progress': 'in-progress'}
    RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))
    for i in range(1, numDicoms + 1):
        filename = "001_{:06d}_{:06d}{}".format(scanNum, i, fileType)
        fullFilename = os.path.join(scanFolder, filename)
        try:
            cmd = getFileReqStruct(fullFilename, writefile=True)
            response = RtAttenWeb.webServer.sendDataMsgFromThread(cmd)
            if response['status'] != 200:
                raise RequestError(response['error'])
        except Exception as err:
            RtAttenWeb.webServer.setUserError(
                "Error uploading file {}: {}".format(fullFilename, str(err)))
            return
        if i > intervalCount * dicomsInProgressInterval:
            val = "{:.0f}%".format(1 / 4 * intervalCount * 100)  # convert to a percentage
            response = {'cmd': 'uploadProgress', 'type': uploadType, 'progress': val}
            RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))
            intervalCount += 1
    response = {'cmd': 'uploadProgress', 'type': uploadType, 'progress': 'complete \u2714'}
    RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))

def getTrainedModel(self, sessionId, runId):
    """Retrieve an ML model trained in a previous run (runId). First see if it
    is cached in memory; if not, load it from file and add it to the cache.
    """
    model = self.modelCache.get(runId, None)
    if model is None:
        # load it from file
        logging.info("modelCache miss on runId %d", runId)
        fname = os.path.join(self.dirs.dataDir, getModelFilename(sessionId, runId))
        if self.session.useSessionTimestamp is True:
            sessionWildcard = re.sub('T.*', 'T*', sessionId)
            filePattern = getModelFilename(sessionWildcard, runId)
            fname = utils.findNewestFile(self.dirs.dataDir, filePattern)
        model = utils.loadMatFile(fname)
        # loadMatFile should either raise an exception or return a value
        if model is None:
            raise StateError("Load model returned None: {}".format(fname))
        if sessionId == self.id_fields.sessionId:
            self.modelCache[runId] = model
    return model

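# Worked example (illustrative; the sessionId format is an assumption): with a
# hypothetical sessionId of '20180508T1530', re.sub('T.*', 'T*', sessionId)
# yields the wildcard '20180508T*', so findNewestFile picks the most recent
# model saved on that date regardless of the exact session timestamp.
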
def resignalFifoThreadExit(fifoThread, webpipes):
    '''Under normal exit conditions the fifoThread will exit when the fifo
    filehandles are closed. However, if the fifo filehandles were never opened
    by both ends, the fifoThread can be blocked waiting for them to open.
    To handle that case we open both filehandles with the O_NONBLOCK flag so
    that if the fifo thread reader is listening they will be opened and closed;
    if not, an OSError is raised, in which case the fifoThread has already
    exited and closed the fifo filehandles.
    '''
    if fifoThread is None:
        return
    try:
        pipeout = os.open(webpipes.name_out, os.O_RDONLY | os.O_NONBLOCK)
        os.close(pipeout)
        pipein = os.open(webpipes.name_in, os.O_WRONLY | os.O_NONBLOCK)
        os.close(pipein)
    except OSError as err:
        # No reader/writer listening on the file, so fifoThread has already exited
        pass
    fifoThread.join(timeout=1)
    if fifoThread.is_alive() is not False:
        raise StateError('runSession: fifoThread not completed')

def getPrevBlkGrp(self, sessionId, runId, blkGrpId):
    """Retrieve a block group's patterns data. First see if it is cached in
    memory; if not, load it from file and add it to the cache.
    """
    bgKey = getBlkGrpKey(runId, blkGrpId)
    prev_bg = self.blkGrpCache.get(bgKey, None)
    if prev_bg is None:
        # load it from file
        logging.info("blkGrpCache miss on <runId, blkGrpId> %s", bgKey)
        fname = os.path.join(self.dirs.dataDir, getBlkGrpFilename(sessionId, runId, blkGrpId))
        if self.session.useSessionTimestamp is True:
            sessionWildcard = re.sub('T.*', 'T*', sessionId)
            filePattern = getBlkGrpFilename(sessionWildcard, runId, blkGrpId)
            fname = utils.findNewestFile(self.dirs.dataDir, filePattern)
        prev_bg = utils.loadMatFile(fname)
        # loadMatFile should either raise an exception or return a value
        if prev_bg is None:
            raise StateError("Load blkGrp returned None: {}".format(fname))
        if sessionId == self.id_fields.sessionId:
            self.blkGrpCache[bgKey] = prev_bg
    return prev_bg

def dataCallback(client, message):
    response = json.loads(message)
    if 'cmd' not in response:
        raise StateError('dataCallback: cmd field missing from response: {}'.format(response))
    if 'status' not in response:
        raise StateError('dataCallback: status field missing from response: {}'.format(response))
    if 'seqNum' not in response:
        raise StateError('dataCallback: seqNum field missing from response: {}'.format(response))
    seqNum = response['seqNum']
    origCmd = response['cmd']
    logging.log(DebugLevels.L6, "callback {}: {} {}".format(seqNum, origCmd, response['status']))
    # Thread Synchronized Section
    Web.threadLock.acquire()
    try:
        callbackStruct = Web.dataCallbacks.pop(seqNum, None)
        if callbackStruct is None:
            logging.error('WebServer: dataCallback seqNum {} not found, current seqNum {}'
                          .format(seqNum, Web.dataSequenceNum))
            return
        if callbackStruct.seqNum != seqNum:
            # This should never happen
            raise StateError('seqNum mismatch {} {}'.format(callbackStruct.seqNum, seqNum))
        callbackStruct.response = response
        callbackStruct.status = response['status']
        if callbackStruct.status == 200:
            if origCmd in ('ping', 'initWatch', 'putTextFile', 'dataLog'):
                pass
            elif origCmd in ('getFile', 'getNewestFile', 'watchFile'):
                if 'data' not in response:
                    raise StateError('dataCallback: data field missing from response: {}'.format(response))
            else:
                callbackStruct.error = 'Unrecognized origCmd {}'.format(origCmd)
        else:
            if 'error' not in response or response['error'] == '':
                raise StateError('dataCallback: error field missing from response: {}'.format(response))
            callbackStruct.error = response['error']
        callbackStruct.event.set()
    except Exception as err:
        logging.error('WebServer: dataCallback error: {}'.format(err))
        raise err
    finally:
        Web.threadLock.release()
    if time.time() > Web.cbPruneTime:
        Web.cbPruneTime = time.time() + 60
        Web.pruneCallbacks()

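# Illustrative sketch (assumption): the shape of a dataserver reply that
# dataCallback expects. The field names come from the checks above; the
# values themselves are made up.
#
#   {
#       "cmd": "getFile",        # echoes the original request command
#       "seqNum": 42,            # matches the seqNum assigned in sendDataMessage
#       "status": 200,           # 200 on success; otherwise an "error" field is required
#       "filename": "/data/subj1/mask_1_1.mat",
#       "data": "<base64-encoded file contents>"
#   }
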
def initSession(self, cfg):
    self.cfgValidation(cfg)
    sessionDate = datetime.datetime.now()
    dateStr = cfg.session.date.lower()
    if dateStr != 'now' and dateStr != 'today':
        try:
            sessionDate = parser.parse(cfg.session.date)
        except ValueError as err:
            raise RequestError('Unable to parse date string {} {}'.format(cfg.session.date, err))
    if cfg.session.sessionId in (None, '') or cfg.session.useSessionTimestamp is True:
        # we didn't specify a session id because we want the session id to be the same as the scan date
        cfg.session.useSessionTimestamp = True
        cfg.session.sessionId = dateStr30(sessionDate.timetuple())
    else:
        # if cfg.session.useSessionTimestamp is true, then it will look for the newest files
        # from that same date.
        # if cfg.session.useSessionTimestamp is false, you just want to use only specific files
        # from the exact session id
        if cfg.session.sessionId in (None, ''):
            raise InvocationError(
                "You need to provide a session Id in the config file or change "
                "your settings to set cfg.session.useSessionTimestamp to true.")
        sessionStr = cfg.session.sessionId.lower()
        if sessionStr == 'now' or sessionStr == 'today':
            # we do specify now as the session id because we want the session files to be saved
            # with today's date even if it's different from the scanning date
            currentDate = datetime.datetime.now()
            cfg.session.sessionId = dateStr30(currentDate.timetuple())
            cfg.session.useSessionTimestamp = True
        else:
            # we specified an exact session id that we want the files to be saved as;
            # the exact string will then be used to find the files for training
            cfg.session.useSessionTimestamp = False
    logging.log(DebugLevels.L1, "## Start Session: %s, subNum%d subDay%d",
                cfg.session.sessionId, cfg.session.subjectNum, cfg.session.subjectDay)
    logging.log(DebugLevels.L1, "Config: %r", cfg)
    # Set Directories
    self.dirs.dataDir = getSubjectDataDir(cfg.session.dataDir, cfg.session.subjectNum,
                                          cfg.session.subjectDay)
    if self.webUseRemoteFiles:
        # The remote fileWatcher dataDir will be the same, but locally we want
        # the data directory to be a subset of a common output directory.
        self.dirs.remoteDataDir = self.dirs.dataDir
        cmd = {'cmd': 'webCommonDir'}
        retVals = wcutils.clientWebpipeCmd(self.webpipes, cmd)
        self.webCommonDir = retVals.filename
        self.dirs.dataDir = os.path.normpath(self.webCommonDir + self.dirs.dataDir)
    self.dirs.serverDataDir = getSubjectDataDir(cfg.session.serverDataDir, cfg.session.subjectNum,
                                                cfg.session.subjectDay)
    if os.path.isabs(self.dirs.serverDataDir):
        # strip the leading separator to make it a relative path
        self.dirs.serverDataDir = self.dirs.serverDataDir.lstrip(os.sep)
    if not os.path.exists(self.dirs.serverDataDir):
        os.makedirs(self.dirs.serverDataDir)
    if cfg.session.buildImgPath:
        datestr = sessionDate.strftime("%Y%m%d")
        imgDirName = "{}.{}.{}".format(datestr, cfg.session.subjectName, cfg.session.subjectName)
        self.dirs.imgDir = os.path.join(cfg.session.imgDir, imgDirName)
    else:
        self.dirs.imgDir = cfg.session.imgDir
    print("fMRI files being read from: {}".format(self.dirs.imgDir))
    if self.webUseRemoteFiles:
        # send initWatch via webpipe
        initWatchCmd = wcutils.initWatchReqStruct(self.dirs.imgDir,
                                                  cfg.session.watchFilePattern,
                                                  cfg.session.minExpectedDicomSize,
                                                  cfg.session.demoStep)
        wcutils.clientWebpipeCmd(self.webpipes, initWatchCmd)
    else:
        if not os.path.exists(self.dirs.imgDir):
            os.makedirs(self.dirs.imgDir)
        if self.fileWatcher is None:
            raise StateError('initSession: fileWatcher is None')
        self.fileWatcher.initFileNotifier(self.dirs.imgDir, cfg.session.watchFilePattern,
                                          cfg.session.minExpectedDicomSize, cfg.session.demoStep)
    # Load ROI mask - an array with 1s indicating the voxels of interest
    maskData = None
    maskFileName = 'mask_' + str(cfg.session.subjectNum) + '_' + str(cfg.session.subjectDay) + '.mat'
    if self.webUseRemoteFiles and cfg.session.getMasksFromControlRoom:
        # get the mask from the remote site
        maskFileName = os.path.join(self.dirs.remoteDataDir, maskFileName)
        logging.info("Getting Remote Mask file: %s", maskFileName)
        getFileCmd = wcutils.getFileReqStruct(maskFileName)
        retVals = wcutils.clientWebpipeCmd(self.webpipes, getFileCmd)
        maskData = retVals.data
        print("Using remote mask {}".format(retVals.filename))
    else:
        # read the mask locally
        maskFileName = os.path.join(self.dirs.dataDir, maskFileName)
        logging.info("Getting Local Mask file: %s", maskFileName)
        maskData = utils.loadMatFile(maskFileName)
        print("Using mask {}".format(maskFileName))
    roi = maskData.mask
    if type(roi) != np.ndarray:
        raise StateError('initSession: ROI type {} is not ndarray'.format(type(roi)))
    # find indices of non-zero elements in roi in row-major order but sorted by col-major order
    cfg.session.roiInds = utils.find(roi)
    cfg.session.roiDims = roi.shape
    cfg.session.nVoxels = cfg.session.roiInds.size
    super().initSession(cfg)

def webUserCallback(client, message):
    if RtAttenWeb.initialized is not True:
        raise StateError('webUserCallback: RtAttenWeb not initialized')
    request = json.loads(message)
    if 'config' in request:
        # Common code for any command that sends config information - retrieve the config info
        cfgData = request['config']
        newCfg = recurseCreateStructDict(cfgData)
        if newCfg is not None:
            RtAttenWeb.cfg = newCfg
        else:
            if cfgData is None:
                errStr = 'webUserCallback: Config field is None'
            elif type(cfgData) not in (dict, list):
                errStr = 'webUserCallback: Config field wrong type {}'.format(type(cfgData))
            else:
                errStr = 'webUserCallback: Error parsing config field {}'.format(cfgData)
            RtAttenWeb.webServer.setUserError(errStr)
            return
    cmd = request['cmd']
    logging.log(DebugLevels.L3, "WEB USER CMD: %s", cmd)
    if cmd == "getDefaultConfig":
        if 'session' in RtAttenWeb.cfg:
            # remove the roiInds ndarray because it can't be JSON serialized
            del RtAttenWeb.cfg.session.roiInds
        RtAttenWeb.webServer.sendUserConfig(RtAttenWeb.cfg, filesremote=RtAttenWeb.filesremote)
    elif cmd == "run":
        if RtAttenWeb.runSessionThread is not None:
            RtAttenWeb.runSessionThread.join(timeout=1)
            if RtAttenWeb.runSessionThread.is_alive():
                RtAttenWeb.webServer.setUserError("Client thread already running, skipping new request")
                return
            RtAttenWeb.runSessionThread = None
        RtAttenWeb.stopRun = False
        RtAttenWeb.runSessionThread = threading.Thread(name='runSessionThread',
                                                       target=RtAttenWeb.runSession)
        RtAttenWeb.runSessionThread.setDaemon(True)
        RtAttenWeb.runSessionThread.start()
    elif cmd == "stop":
        if RtAttenWeb.runSessionThread is not None:
            RtAttenWeb.stopRun = True
            RtAttenWeb.runSessionThread.join(timeout=1)
            if not RtAttenWeb.runSessionThread.is_alive():
                RtAttenWeb.runSessionThread = None
                RtAttenWeb.stopRun = False
    elif cmd == "runReg":
        if RtAttenWeb.registrationThread is not None:
            RtAttenWeb.registrationThread.join(timeout=1)
            if RtAttenWeb.registrationThread.is_alive():
                RtAttenWeb.webServer.setUserError("Registration thread already running, skipping new request")
                return
        RtAttenWeb.stopReg = False
        RtAttenWeb.registrationThread = threading.Thread(name='registrationThread',
                                                         target=RtAttenWeb.runRegistration,
                                                         args=(request,))
        RtAttenWeb.registrationThread.setDaemon(True)
        RtAttenWeb.registrationThread.start()
    elif cmd == "stopReg":
        if RtAttenWeb.registrationThread is not None:
            RtAttenWeb.stopReg = True
            RtAttenWeb.registrationThread.join(timeout=1)
            if not RtAttenWeb.registrationThread.is_alive():
                RtAttenWeb.registrationThread = None
                RtAttenWeb.stopReg = False
    elif cmd == "uploadImages":
        if RtAttenWeb.uploadImageThread is not None:
            RtAttenWeb.uploadImageThread.join(timeout=1)
            if RtAttenWeb.uploadImageThread.is_alive():
                RtAttenWeb.webServer.setUserError("Upload images thread already running, skipping new request")
                return
        RtAttenWeb.uploadImageThread = threading.Thread(name='uploadImages',
                                                        target=RtAttenWeb.uploadImages,
                                                        args=(request,))
        RtAttenWeb.uploadImageThread.setDaemon(True)
        RtAttenWeb.uploadImageThread.start()
    else:
        RtAttenWeb.webServer.setUserError("unknown command " + cmd)

def runRegistration(request, test=None):
    if 'cmd' not in request or request['cmd'] != "runReg":
        raise StateError('runRegistration: incorrect cmd request: {}'.format(request))
    try:
        regConfig = request['regConfig']
        regType = request['regType']
        dayNum = float(regConfig['dayNum'])
    except KeyError as err:
        RtAttenWeb.webServer.setUserError("Registration missing a parameter ('regConfig', 'regType', 'dayNum')")
        return
    # Create the globals.sh file in the registration directory
    RtAttenWeb.writeRegConfigFile(regConfig, registrationDir)
    # Start the bash command and monitor output
    if test is not None:
        cmd = test
    elif regType == 'skullstrip':
        if dayNum != 1:
            RtAttenWeb.webServer.setUserError("Skullstrip can only be run for day1 data")
            return
        cmd = ['bash', 'skullstrip_t1.sh', '1']
        if 'makenii' in regConfig and regConfig['makenii'] is False:
            cmd = ['bash', 'skullstrip_t1.sh']
    elif regType == 'registration':
        if dayNum == 1:
            cmd = ['bash', 'reg_t1.sh']
        else:
            cmd = ['bash', 'reg_epi_day2.sh', '1', '1']
    elif regType == 'makemask':
        cmd = ['bash', 'run_makemask_nii.sh']
    else:
        RtAttenWeb.webServer.setUserError("unknown registration type: " + regType)
        return
    proc = subprocess.Popen(cmd, cwd=registrationDir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    lineQueue = queue.Queue()
    outputThread = threading.Thread(target=RtAttenWeb.procOutputReader, args=(proc, lineQueue))
    outputThread.setDaemon(True)
    outputThread.start()
    outputLineCount = 0
    line = 'start'
    statusInterval = 0.5  # interval (sec) for sending status updates
    statusTime = time.time() - statusInterval
    # subprocess poll returns None while the subprocess is running
    while proc.poll() is None or line != '':
        currTime = time.time()
        if RtAttenWeb.stopReg is True:
            killPid(proc.pid)
            break
        if currTime >= statusTime + statusInterval:
            # send status
            statusTime = currTime
            # logging.log(logging.INFO, "psutil pid %d", proc.pid)
            procInfo = getProcessInfo(proc.pid, str(cmd))
            response = {'cmd': 'regStatus', 'type': regType, 'status': procInfo}
            RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))
        try:
            line = lineQueue.get(block=True, timeout=1)
        except queue.Empty:
            line = ''
        if line != '':
            # send output to the web interface
            if test:
                print(line, end='')
            else:
                response = {'cmd': 'regLog', 'value': line}
                RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))
            outputLineCount += 1
    outputThread.join(timeout=1)
    if outputThread.is_alive():
        print("OutputThread failed to exit")
    # processing complete, clear status
    endStatus = 'complete \u2714'
    if RtAttenWeb.stopReg is True:
        endStatus = 'stopped'
    response = {'cmd': 'regStatus', 'type': regType, 'status': endStatus}
    RtAttenWeb.webServer.sendUserMsgFromThread(json.dumps(response))
    return outputLineCount