def __init__(self, filename, taskQueue=None):
    """Open a TIFF series: locate all files matching the 'SeriesPattern'
    metadata entry next to *filename* and probe the first frame.
    """
    self.filename = getFullFilename(filename)  # resolve relative path to an absolute one

    # The series pattern (a glob) is stored in the metadata file rather than
    # hard-coded; expand it relative to the directory of the metadata file.
    mdh = MetaDataHandler.SimpleMDHandler(self.filename)
    series_pattern = mdh.getEntry('SeriesPattern')
    directory = os.path.split(self.filename)[0]
    self.files = sorted(glob.glob(os.path.join(directory, series_pattern)))

    self.im0 = Image.open(self.files[0])

    # PIL's endianness support is subtly broken - try to fix it.
    # NB this is untested for floating point tiffs.
    # TIFF prefix 'MM' marks big-endian files; anything else is treated as little-endian.
    self.endedness = 'BE' if self.im0.ifd.prefix == 'MM' else 'LE'

    print((self.im0.ifd.prefix))
    print((self.endedness))
def __init__(self, filename, taskQueue, chanNum=0):
    """Open a multi-page TIFF with tifffile and keep the page list.

    Parameters
    ----------
    filename : str
        Path (possibly relative) of the TIFF stack to open.
    taskQueue :
        Unused here; kept for interface compatibility with the other
        data-source classes.
    chanNum : int, optional
        Channel number to read (default 0).
    """
    self.filename = getFullFilename(filename)  # convert relative path to full path
    self.chanNum = chanNum

    # NOTE: an earlier PIL-based implementation looped over every slice to
    # count frames (loading each into memory); tifffile exposes the page
    # list directly, avoiding that penalty.

    # Parenthesized single-argument print with %-formatting behaves
    # identically under Python 2 (statement) and Python 3 (function),
    # matching the Py2/3-compatible prints used elsewhere in this file.
    print('> TiffDataSource.py: init from file %s' % self.filename)
    self.im = tifffile.TIFFfile(self.filename).series[0].pages
def __init__(self, filename, taskQueue, chanNum=0):
    """Open a multi-page TIFF with tifffile and keep the page list.

    Parameters
    ----------
    filename : str
        Path (possibly relative) of the TIFF stack to open.
    taskQueue :
        Unused here; kept for interface compatibility with the other
        data-source classes.
    chanNum : int, optional
        Channel number to read (default 0).
    """
    self.filename = getFullFilename(filename)  # convert relative path to full path
    self.chanNum = chanNum

    # NOTE: an earlier PIL-based implementation looped over every slice to
    # count frames (loading each into memory); tifffile exposes the page
    # list directly, avoiding that penalty.

    # Parenthesized single-argument print with %-formatting behaves
    # identically under Python 2 (statement) and Python 3 (function),
    # matching the Py2/3-compatible prints used elsewhere in this file.
    print('> TiffDataSource.py: init from file %s' % self.filename)
    self.im = tifffile.TIFFfile(self.filename).series[0].pages
def __init__(self, h5Filename, taskQueue=None):
    """Open an existing HDF5 data file for reading."""
    # Resolve the (possibly relative) path once, then open it with pytables.
    full_path = getFullFilename(h5Filename)
    self.h5Filename = full_path
    self.h5File = tables.openFile(full_path)
def __init__(self, name, dataFilename = None, resultsFilename=None, onEmpty = doNix, fTaskToPop = popZero, startAt = 'guestimate', frameSize=(-1,-1), complevel=6, complib='zlib'):
    """Create (or re-open) a spooling task queue backed by an HDF5 data file.

    Parameters
    ----------
    name : str
        Queue name; also used to derive the data file name when
        dataFilename is not given, and stored as self.queueID.
    dataFilename : str, optional
        Path of the HDF5 image-data file (default: genDataFilename(name)).
    resultsFilename : str, optional
        Path of the results file (default: genResultFileName(dataFilename)).
    onEmpty, fTaskToPop :
        Forwarded to HDFResultsTaskQueue.__init__.
    startAt : int or str
        First frame number to queue as a task. 'guestimate' derives a
        start frame from the EstimatedLaserOnFrameNo metadata entry;
        'notYet' queues nothing initially.
    frameSize : tuple
        (width, height) of a frame; only used when creating a new file.
    complevel, complib :
        pytables compression settings for a newly created file.
    """
    if dataFilename is None:
        self.dataFilename = genDataFilename(name)
    else:
        self.dataFilename = dataFilename

    # FIX: removed a redundant no-op `else: resultsFilename = resultsFilename`.
    if resultsFilename is None:
        resultsFilename = genResultFileName(self.dataFilename)

    ffn = getFullFilename(self.dataFilename)

    self.acceptNewTasks = False
    self.releaseNewTasks = False
    self.postTaskBuffer = []

    initialTasks = []

    if os.path.exists(ffn):  # file already exists - read from it
        self.h5DataFile = tables.openFile(ffn, 'r')
        self.dataMDH = MetaDataHandler.NestedClassMDHandler(
            MetaDataHandler.HDFMDHandler(self.h5DataFile))
        self.imageData = self.h5DataFile.root.ImageData

        if startAt == 'guestimate':  # calculate a suitable starting value
            tLon = self.dataMDH.EstimatedLaserOnFrameNo
            if tLon == 0:
                startAt = 0
            else:
                startAt = tLon + 10

        if startAt == 'notYet':
            initialTasks = []
        else:
            initialTasks = list(range(startAt, self.h5DataFile.root.ImageData.shape[0]))

        self.imNum = len(self.imageData)
        self.dataRW = False
    else:  # make ourselves a new file
        self.h5DataFile = tables.openFile(ffn, 'w')

        filt = tables.Filters(complevel, complib, shuffle=True)

        self.imageData = self.h5DataFile.createEArray(
            self.h5DataFile.root, 'ImageData', tables.UInt16Atom(),
            (0,) + tuple(frameSize), filters=filt,
            chunkshape=(1,) + tuple(frameSize))
        self.events = self.h5DataFile.createTable(
            self.h5DataFile.root, 'Events', SpoolEvent, filters=filt)
        self.imNum = 0
        self.acceptNewTasks = True

        self.dataMDH = MetaDataHandler.HDFMDHandler(self.h5DataFile)
        self.dataMDH.mergeEntriesFrom(MetaData.TIRFDefault)
        self.dataRW = True

    HDFResultsTaskQueue.__init__(self, name, resultsFilename, initialTasks,
                                 onEmpty, fTaskToPop)

    self.resultsMDH.copyEntriesFrom(self.dataMDH)
    self.metaData.copyEntriesFrom(self.resultsMDH)

    # copy events to results file
    if len(self.h5DataFile.root.Events) > 0:
        self.resultsEvents.append(self.h5DataFile.root.Events[:])

    self.queueID = name
    self.numSlices = self.imageData.shape[0]

    # shared module-level lock rather than a per-instance threading.Lock
    self.dataFileLock = tablesLock
    self.lastTaskTime = 0
def __init__(self, name, dataFilename=None, resultsFilename=None, onEmpty=doNix, fTaskToPop=popZero, startAt='guestimate', frameSize=(-1, -1), complevel=6, complib='zlib'):
    """Create (or re-open) a spooling task queue backed by an HDF5 data file.

    Parameters
    ----------
    name : str
        Queue name; also used to derive the data file name when
        dataFilename is not given, and stored as self.queueID.
    dataFilename : str, optional
        Path of the HDF5 image-data file (default: genDataFilename(name)).
    resultsFilename : str, optional
        Path of the results file (default: genResultFileName(dataFilename)).
    onEmpty, fTaskToPop :
        Forwarded to HDFResultsTaskQueue.__init__.
    startAt : int or str
        First frame number to queue as a task. 'guestimate' derives a
        start frame from the EstimatedLaserOnFrameNo metadata entry;
        'notYet' queues nothing initially.
    frameSize : tuple
        (width, height) of a frame; only used when creating a new file.
    complevel, complib :
        pytables compression settings for a newly created file.
    """
    if dataFilename is None:
        self.dataFilename = genDataFilename(name)
    else:
        self.dataFilename = dataFilename

    # FIX: removed a redundant no-op `else: resultsFilename = resultsFilename`.
    if resultsFilename is None:
        resultsFilename = genResultFileName(self.dataFilename)

    ffn = getFullFilename(self.dataFilename)

    self.acceptNewTasks = False
    self.releaseNewTasks = False
    self.postTaskBuffer = []

    initialTasks = []

    if os.path.exists(ffn):  # file already exists - read from it
        self.h5DataFile = tables.openFile(ffn, 'r')
        self.dataMDH = MetaDataHandler.NestedClassMDHandler(
            MetaDataHandler.HDFMDHandler(self.h5DataFile))
        self.imageData = self.h5DataFile.root.ImageData

        if startAt == 'guestimate':  # calculate a suitable starting value
            tLon = self.dataMDH.EstimatedLaserOnFrameNo
            if tLon == 0:
                startAt = 0
            else:
                startAt = tLon + 10

        if startAt == 'notYet':
            initialTasks = []
        else:
            initialTasks = list(
                range(startAt, self.h5DataFile.root.ImageData.shape[0]))

        self.imNum = len(self.imageData)
        self.dataRW = False
    else:  # make ourselves a new file
        self.h5DataFile = tables.openFile(ffn, 'w')

        filt = tables.Filters(complevel, complib, shuffle=True)

        self.imageData = self.h5DataFile.createEArray(
            self.h5DataFile.root, 'ImageData', tables.UInt16Atom(),
            (0, ) + tuple(frameSize), filters=filt,
            chunkshape=(1, ) + tuple(frameSize))
        self.events = self.h5DataFile.createTable(
            self.h5DataFile.root, 'Events', SpoolEvent, filters=filt)
        self.imNum = 0
        self.acceptNewTasks = True

        self.dataMDH = MetaDataHandler.HDFMDHandler(self.h5DataFile)
        self.dataMDH.mergeEntriesFrom(MetaData.TIRFDefault)
        self.dataRW = True

    HDFResultsTaskQueue.__init__(self, name, resultsFilename, initialTasks,
                                 onEmpty, fTaskToPop)

    self.resultsMDH.copyEntriesFrom(self.dataMDH)
    self.metaData.copyEntriesFrom(self.resultsMDH)

    # copy events to results file
    if len(self.h5DataFile.root.Events) > 0:
        self.resultsEvents.append(self.h5DataFile.root.Events[:])

    self.queueID = name
    self.numSlices = self.imageData.shape[0]

    # shared module-level lock rather than a per-instance threading.Lock
    self.dataFileLock = tablesLock
    self.lastTaskTime = 0
def __init__(self, h5Filename, taskQueue=None):
    """Open an existing HDF5 data file for reading."""
    # Resolve the (possibly relative) path once, then open it with pytables.
    full_path = getFullFilename(h5Filename)
    self.h5Filename = full_path
    self.h5File = tables.openFile(full_path)