def pushImagesDS(self, image):
    """Push an image series to the distributed task server as a DSTaskQueue,
    creating the results file and copying analysis metadata and acquisition events."""
    resultsFilename = _verifyResultsFilename(genResultFileName(image.seriesName))
    self.queueName = resultsFilename
    debugPrint('Results file = %s' % resultsFilename)

    self.resultsMdh = MetaDataHandler.NestedClassMDHandler(self.analysisMDH)
    self.resultsMdh['DataFileID'] = fileID.genDataSourceID(image.dataSource)

    mn = image.dataSource.moduleName
    #dsID = self.image.seriesName
    #if it's a buffered source, go back to underlying source
    if mn == 'BufferedDataSource':
        mn = image.dataSource.dataSource.moduleName

    self.tq.createQueue('DSTaskQueue', self.queueName, self.resultsMdh, mn,
                        image.seriesName, resultsFilename,
                        startAt=self.analysisMDH['Analysis.StartAt'])

    evts = image.dataSource.getEvents()
    if len(evts) > 0:
        self.tq.addQueueEvents(self.queueName, evts)

    debugPrint('Queue created')

    self.onImagesPushed.send(self)
def pushImagesHDF(self, image):
    """Push a spooled HDF series to the task server as an HDFTaskQueue and
    release tasks from the configured start frame."""
    dataFilename = image.seriesName
    resultsFilename = _verifyResultsFilename(genResultFileName(image.seriesName))
    self.queueName = resultsFilename

    self.tq.createQueue('HDFTaskQueue', self.queueName, dataFilename=dataFilename,
                        resultsFilename=resultsFilename, startAt='notYet')

    mdhQ = MetaDataHandler.QueueMDHandler(self.tq, self.queueName, self.analysisMDH)
    mdhQ['DataFileID'] = fileID.genDataSourceID(image.dataSource)

    # evts = self.image.dataSource.getEvents()
    # if len(evts) > 0:
    #     self.tq.addQueueEvents(self.image.seriesName, evts)

    self.resultsMdh = mdhQ

    self.tq.releaseTasks(self.queueName, self.analysisMDH['Analysis.StartAt'])

    self.onImagesPushed.send(self)
def pushImagesHDF(startingAt=0, detThresh=.9, fitFcn='LatGaussFitFR'):
    """Script-level variant operating on the module globals (seriesName, mdh,
    dataSource, tq); prompts for a new results filename if one already exists."""
    global seriesName
    dataFilename = seriesName
    resultsFilename = genResultFileName(seriesName)

    while os.path.exists(resultsFilename):
        di, fn = os.path.split(resultsFilename)
        fdialog = wx.FileDialog(None, 'Analysis file already exists, please select a new filename',
                                wildcard='H5R files|*.h5r', defaultDir=di,
                                defaultFile=os.path.splitext(fn)[0] + '_1.h5r',
                                style=wx.FD_SAVE)
        succ = fdialog.ShowModal()
        if succ == wx.ID_OK:
            resultsFilename = fdialog.GetPath()
        else:
            raise RuntimeError('Invalid results file - not running')

    seriesName = resultsFilename

    tq.createQueue('HDFTaskQueue', seriesName, dataFilename=dataFilename,
                   resultsFilename=resultsFilename, startAt='notYet')

    mdhQ = MetaDataHandler.QueueMDHandler(tq, seriesName, mdh)
    mdhQ.setEntry('Analysis.DetectionThreshold', detThresh)
    mdhQ.setEntry('Analysis.FitModule', fitFcn)
    mdhQ.setEntry('Analysis.DataFileID', fileID.genDataSourceID(dataSource))

    evts = dataSource.getEvents()
    if len(evts) > 0:
        tq.addQueueEvents(seriesName, evts)

    tq.releaseTasks(seriesName, startingAt)
def __init__(self, name, resultsFilename, initialTasks=[], onEmpty=doNix, fTaskToPop=popZero):
    """ Generate a task queue which saves results to an HDF5 file using pytables

    NOTE: This is only ever used as a base class

    Args:
        name : string
            the queue name by which this set of tasks is identified
        resultsFilename : string
            the name of the output file
        initialTasks : list
            tasks to populate the queue with initially - not used in practice
        onEmpty :
            what to do when the list of tasks is empty (nominally for closing
            output files etc ..., but unused)
        fTaskToPop :
            a callback function which decides which task to give a worker.
            Returns the index of the task to return, based on information
            about the current worker. An initial attempt at load balancing,
            which is now not really used.
    """
    if resultsFilename is None:
        #autogenerate a filename if none given
        resultsFilename = genResultFileName(name)

    if os.path.exists(resultsFilename):
        #bail if output file already exists
        raise RuntimeError('Output file already exists: ' + resultsFilename)

    TaskQueue.__init__(self, name, initialTasks, onEmpty, fTaskToPop)

    self.resultsFilename = resultsFilename
    self.numClosedTasks = 0

    #self.fileResultsLock = threading.Lock()
    self.fileResultsLock = tablesLock

    self.metaData = MetaDataHandler.NestedClassMDHandler()
    #self.metaData = None #MetaDataHandler.NestedClassMDHandler(self.resultsMDH)
    self.metaDataStale = True
    self.MDHCache = []

    self.resultsQueue = [] #Queue.Queue()
    self.resultsQueueLock = threading.Lock()
    self.lastResultsQueuePurge = time.time()

    logging.info('Results file initialised')
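# --- Illustration only (not part of the original source) ---
# The fTaskToPop docstring above describes a callback that chooses which queued
# task to hand to a worker. The sketch below shows the general idea; its
# parameter names and signature are assumptions made purely for illustration
# (the real default, popZero, is defined elsewhere in this module and simply
# returns index 0).
def _pop_newest_first(workerInfo, numTasks):
    """Hypothetical load-balancing callback: hand out the most recently queued task."""
    return numTasks - 1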
def __init__(self, name, dataFilename=None, resultsFilename=None, onEmpty=doNix,
             fTaskToPop=popZero, startAt='guestimate', frameSize=(-1, -1),
             complevel=6, complib='zlib', resultsURI=None):
    if dataFilename is None:
        self.dataFilename = genDataFilename(name)
    else:
        self.dataFilename = dataFilename

    if resultsFilename is None:
        resultsFilename = genResultFileName(self.dataFilename)

    ffn = getFullFilename(self.dataFilename)

    self.acceptNewTasks = False
    self.releaseNewTasks = False

    self.postTaskBuffer = []

    initialTasks = []

    self.resultsURI = resultsURI

    if os.path.exists(ffn): #file already exists - read from it
        self.h5DataFile = tables.open_file(ffn, 'r')
        #self.metaData = MetaData.genMetaDataFromHDF(self.h5DataFile)
        self.dataMDH = MetaDataHandler.NestedClassMDHandler(
            MetaDataHandler.HDFMDHandler(self.h5DataFile))
        #self.dataMDH.mergeEntriesFrom(MetaData.TIRFDefault)
        self.imageData = self.h5DataFile.root.ImageData

        if startAt == 'guestimate': #calculate a suitable starting value
            tLon = self.dataMDH.EstimatedLaserOnFrameNo
            if tLon == 0:
                startAt = 0
            else:
                startAt = tLon + 10

        if startAt == 'notYet':
            initialTasks = []
        else:
            initialTasks = list(range(startAt, self.h5DataFile.root.ImageData.shape[0]))

        self.imNum = len(self.imageData)
        self.dataRW = False

    else: #make ourselves a new file
        self.h5DataFile = tables.open_file(ffn, 'w')

        filt = tables.Filters(complevel, complib, shuffle=True)

        self.imageData = self.h5DataFile.create_earray(self.h5DataFile.root, 'ImageData',
                                                       tables.UInt16Atom(),
                                                       (0, ) + tuple(frameSize),
                                                       filters=filt,
                                                       chunkshape=(1, ) + tuple(frameSize))
        self.events = self.h5DataFile.create_table(self.h5DataFile.root, 'Events',
                                                   SpoolEvent, filters=filt)
        self.imNum = 0
        self.acceptNewTasks = True

        self.dataMDH = MetaDataHandler.HDFMDHandler(self.h5DataFile)
        self.dataMDH.mergeEntriesFrom(MetaData.TIRFDefault)
        self.dataRW = True

    HDFResultsTaskQueue.__init__(self, name, resultsFilename, initialTasks, onEmpty, fTaskToPop)

    #self.resultsMDH.copyEntriesFrom(self.dataMDH)
    #self.metaData.copyEntriesFrom(self.resultsMDH)
    HDFResultsTaskQueue.setQueueMetaDataEntries(self, self.dataMDH)

    #copy events to results file
    if len(self.h5DataFile.root.Events) > 0:
        HDFResultsTaskQueue.addQueueEvents(self, self.h5DataFile.root.Events[:])
        #self.resultsEvents.append(self.h5DataFile.root.Events[:])

    self.queueID = name

    self.numSlices = self.imageData.shape[0]

    #self.dataFileLock = threading.Lock()
    self.dataFileLock = tablesLock
    #self.getTaskLock = threading.Lock()

    self.lastTaskTime = 0
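# --- Minimal usage sketch (illustration only, not part of the original source) ---
# Assumes a spooled series named 'series_1' whose data file can be resolved by
# genDataFilename()/getFullFilename(); with startAt='guestimate' the queue opens
# that file, estimates a start frame from the metadata, creates the matching
# results file, and queues one fitting task per remaining frame. The series
# name below is a placeholder chosen for this example.
if __name__ == '__main__':
    tq = HDFTaskQueue('series_1', startAt='guestimate')
    print('%d frames in series, results will be written to %s'
          % (tq.numSlices, tq.resultsFilename))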