def Loadh5(self, filename):
    '''Load PYMEs semi-custom HDF5 image data format.

    Offloads all the hard work to the HDFDataSource class.  Chains a
    background-subtraction source on top of the raw data, recovers metadata
    from the file (falling back to TIRF defaults with a warning), and pulls
    in any events stored alongside the frames.
    '''
    import tables
    from PYME.Analysis.DataSources import HDFDataSource, BGSDataSource
    from PYME.Analysis.LMVis import inpFilt

    #open hdf5 file
    self.dataSource = HDFDataSource.DataSource(filename, None)
    #chain on a background subtraction data source, so we can easily do
    #background subtraction in the GUI the same way as in the analysis
    self.data = BGSDataSource.DataSource(self.dataSource) #this will get replaced with a wrapped version

    if 'MetaData' in self.dataSource.h5File.root: #should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
    else:
        self.mdh = MetaData.TIRFDefault
        #fixed typos in the user-facing warning ("gaurantees" -> "guarantees",
        #"fond" -> "found")
        wx.MessageBox("Carrying on with defaults - no guarantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        print("ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well")

    #attempt to estimate any missing parameters from the data itself
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    #calculate the name to use when we do batch analysis on this
    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    #try and find a previously performed analysis
    fns = filename.split(os.path.sep)
    cand = os.path.sep.join(fns[:-2] + ['analysis',] + fns[-2:]) + 'r'
    print(cand)
    if False: #os.path.exists(cand): -- deliberately disabled
        h5Results = tables.openFile(cand)

        if 'FitResults' in dir(h5Results.root):
            self.fitResults = h5Results.root.FitResults[:]
            self.resultsSource = inpFilt.h5rSource(h5Results)

            self.resultsMdh = MetaData.TIRFDefault
            self.resultsMdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(h5Results))

    self.events = self.dataSource.getEvents()
def LoadDBL(self, filename):
    '''Load Bewersdorf custom STED data.

    The file is a raw little-endian float32 volume after a 128-byte header;
    it is memory-mapped read-only rather than read into RAM.
    '''
    self.FindAndParseMetadata(filename)

    #dimensions come from the metadata; data is stored column-major ('F')
    roi_shape = (self.mdh['Camera.ROIWidth'],
                 self.mdh['Camera.ROIHeight'],
                 self.mdh['NumImages'])
    self.data = numpy.memmap(filename, dtype='<f4', mode='r', offset=128,
                             shape=roi_shape, order='F')

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def LoadNPY(self, filename):
    '''Load numpy .npy data.'''
    self.FindAndParseMetadata(filename)

    #the whole array is loaded into memory (unlike the memmapped formats)
    self.data = numpy.load(filename)

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def LoadImageSeries(self, filename):
    '''Load an image series, buffered to at most 50 slices.'''
    from PYME.Analysis.DataSources import ImageSeriesDataSource

    raw = ImageSeriesDataSource.DataSource(filename, None)
    buffer_len = min(raw.getNumSlices(), 50)
    self.dataSource = BufferedDataSource.DataSource(raw, buffer_len)
    self.data = self.dataSource #this will get replaced with a wrapped version

    self.FindAndParseMetadata(filename)

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def LoadImageSeries(self, filename):
    '''Load an image series, buffered to at most 50 slices.'''
    from PYME.Analysis.DataSources import ImageSeriesDataSource

    raw = ImageSeriesDataSource.DataSource(filename, None)
    self.dataSource = BufferedDataSource.DataSource(
        raw, min(raw.getNumSlices(), 50))
    self.data = self.dataSource #this will get replaced with a wrapped version

    self.FindAndParseMetadata(filename)

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def LoadPSF(self, filename):
    '''Load PYME .psf data.

    .psf files consist of a tuple containing the data and the voxelsize.
    '''
    self.data, vox = numpy.load(filename)

    #NOTE(review): ConfocDefault looks like a shared module-level object and
    #setEntry mutates it in place - confirm this is intended
    self.mdh = MetaData.ConfocDefault

    for axis in ('x', 'y', 'z'):
        self.mdh.setEntry('voxelsize.' + axis, getattr(vox, axis))

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'psf'
def Loadh5(self, filename):
    '''Load PYMEs semi-custom HDF5 image data format.

    Offloads all the hard work to the HDFDataSource class.  Chains a
    background-subtraction source on top of the raw data, recovers metadata
    from the file (falling back to TIRF defaults with a warning), and pulls
    in any events stored alongside the frames.
    '''
    import tables
    from PYME.Analysis.DataSources import HDFDataSource, BGSDataSource
    from PYME.Analysis.LMVis import inpFilt

    #open hdf5 file
    self.dataSource = HDFDataSource.DataSource(filename, None)
    #chain on a background subtraction data source, so we can easily do
    #background subtraction in the GUI the same way as in the analysis
    self.data = BGSDataSource.DataSource(self.dataSource) #this will get replaced with a wrapped version

    if 'MetaData' in self.dataSource.h5File.root: #should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
    else:
        self.mdh = MetaData.TIRFDefault
        #fixed typos in the user-facing warning ("gaurantees" -> "guarantees",
        #"fond" -> "found")
        wx.MessageBox("Carrying on with defaults - no guarantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        print("ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well")

    #attempt to estimate any missing parameters from the data itself
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    #calculate the name to use when we do batch analysis on this
    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    #try and find a previously performed analysis
    fns = filename.split(os.path.sep)
    cand = os.path.sep.join(fns[:-2] + ['analysis',] + fns[-2:]) + 'r'
    print(cand)
    if False: #os.path.exists(cand): -- deliberately disabled
        h5Results = tables.openFile(cand)

        if 'FitResults' in dir(h5Results.root):
            self.fitResults = h5Results.root.FitResults[:]
            self.resultsSource = inpFilt.h5rSource(h5Results)

            self.resultsMdh = MetaData.TIRFDefault
            self.resultsMdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(h5Results))

    self.events = self.dataSource.getEvents()
def LoadDBL(self, filename):
    '''Load Bewersdorf custom STED data.

    Raw little-endian float32 volume after a 128-byte header, exposed as a
    read-only memory map.
    '''
    self.FindAndParseMetadata(filename)

    #sizes come from the parsed metadata; column-major ('F') layout on disk
    self.data = numpy.memmap(
        filename, dtype='<f4', mode='r', offset=128,
        shape=(self.mdh['Camera.ROIWidth'],
               self.mdh['Camera.ROIHeight'],
               self.mdh['NumImages']),
        order='F')

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def LoadKdf(self, filename):
    '''load khorus formatted data - pretty much deprecated by now'''
    import PYME.cSMI as cSMI
    self.data = cSMI.CDataStack_AsArray(cSMI.CDataStack(filename), 0).squeeze()

    self.mdh = MetaData.TIRFDefault

    try: #try and get metadata from the .log file
        #'with' guarantees the handle is closed even if parsing raises
        #(the original leaked the file object on a parse error)
        with open(os.path.splitext(filename)[0] + '.log') as lf:
            from PYME.DSView import logparser
            lp = logparser.logparser()
            log = lp.parse(lf.read())

        self.mdh.setEntry('voxelsize.z', log['PIEZOS']['Stepsize'])
    except Exception: #best effort only - but a bare except would also have
        #swallowed KeyboardInterrupt/SystemExit
        pass

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'psf'
def LoadTiff(self, filename):
    '''Load a TIFF (or LSM) stack.

    Wraps the file in a TiffDataSource behind a BufferedDataSource (up to 50
    buffered slices), then tries to assemble a multi-channel ListWrap using,
    in order of preference: 'ChannelFiles' metadata (one file per channel),
    'ChannelNames' metadata (one slice per channel), or the channel count
    embedded in .lsm files.
    '''
    from PYME.Analysis.DataSources import TiffDataSource

    mdfn = self.FindAndParseMetadata(filename)

    self.dataSource = TiffDataSource.DataSource(filename, None)
    self.dataSource = BufferedDataSource.DataSource(self.dataSource, min(self.dataSource.getNumSlices(), 50))
    self.data = self.dataSource #this will get replaced with a wrapped version

    #if we have a multi channel data set, try and pull in all the channels
    if 'ChannelFiles' in self.mdh.getEntryNames():
        try:
            from PYME.DSView.dataWrap import ListWrap
            #pull in all channels
            chans = []
            for cf in self.mdh.getEntry('ChannelFiles'):
                cfn = os.path.join(os.path.split(filename)[0], cf)

                ds = TiffDataSource.DataSource(cfn, None)
                ds = BufferedDataSource.DataSource(ds, min(ds.getNumSlices(), 50))

                chans.append(ds)

            self.data = ListWrap(chans) #this will get replaced with a wrapped version

            self.filename = mdfn
        except Exception: #best effort - fall back to the single-channel data
            #(was a bare except, which would also catch KeyboardInterrupt)
            pass

    elif 'ChannelNames' in self.mdh.getEntryNames() and len(self.mdh['ChannelNames']) == self.data.getNumSlices():
        from PYME.DSView.dataWrap import ListWrap
        chans = [numpy.atleast_3d(self.data.getSlice(i)) for i in range(len(self.mdh['ChannelNames']))]
        self.data = ListWrap(chans)

    elif filename.endswith('.lsm') and 'LSM.images_number_channels' in self.mdh.keys() and self.mdh['LSM.images_number_channels'] > 1:
        from PYME.DSView.dataWrap import ListWrap
        nChans = self.mdh['LSM.images_number_channels']

        chans = []
        for n in range(nChans):
            ds = TiffDataSource.DataSource(filename, None, n)
            ds = BufferedDataSource.DataSource(ds, min(ds.getNumSlices(), 50))

            chans.append(ds)

        self.data = ListWrap(chans)

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'
def OnBStartSpoolButton(self, event=None, stack=False):
    '''GUI callback to start spooling.

    NB: this is also called programatically by the start stack button.
    '''
    fn = self.tcSpoolFile.GetValue()

    if fn == '': #sanity checking
        wx.MessageBox('Please enter a series name', 'No series name given', wx.OK)
        return #bail

    if not os.path.exists(self.dirname):
        os.makedirs(self.dirname)

    if not self.dirname[-1] == os.sep:
        self.dirname += os.sep

    if (fn + '.h5') in os.listdir(self.dirname): #check to see if data with the same name exists
        ans = wx.MessageBox('A series with the same name already exists', 'Error', wx.OK)
        #overwriting doesn't work ... so just bail
        #increment the series counter first, though, so hopefully we don't
        #get the same error on the next try
        self.seriesCounter += 1
        self.seriesName = self._GenSeriesName()
        self.tcSpoolFile.SetValue(self.seriesName)
        #if ans == wx.NO:
        return #bail

    if self.cbCompress.GetValue():
        compLevel = 2
    else:
        compLevel = 0

    if stack:
        protocol = self.protocolZ
        print(protocol)
    else:
        protocol = self.protocol

    #BUGFIX: preflight-check the protocol we are actually going to run
    #(protocolZ when starting a stack), not unconditionally self.protocol
    if not preflight.ShowPreflightResults(self, protocol.PreflightCheck()):
        return #bail if we failed the pre flight check, and the user didn't choose to continue

    spoolType = self.rbQueue.GetStringSelection()
    if spoolType == 'Queue':
        self.queueName = getRelFilename(self.dirname + fn + '.h5')
        self.spooler = QueueSpooler.Spooler(self.scope, self.queueName, self.scope.pa, protocol, self, complevel=compLevel)
        self.bAnalyse.Enable(True)
    elif spoolType == 'HTTP':
        self.queueName = self.dirname + fn + '.h5'
        self.spooler = HTTPSpooler.Spooler(self.scope, self.queueName, self.scope.pa, protocol, self, complevel=compLevel)
        self.bAnalyse.Enable(True)
    else:
        self.spooler = HDFSpooler.Spooler(self.scope, self.dirname + fn + '.h5', self.scope.pa, protocol, self, complevel=compLevel)

    #update the GUI to reflect the fact that we are now spooling
    self.bStartSpool.Enable(False)
    self.bStartStack.Enable(False)
    self.bStopSpooling.Enable(True)
    self.stSpoolingTo.Enable(True)
    self.stNImages.Enable(True)
    self.stSpoolingTo.SetLabel('Spooling to ' + fn)
    self.stNImages.SetLabel('0 images spooled in 0 minutes')

    if sampInf:
        sampleInformation.getSampleData(self, self.spooler.md)
def OnBStartSpoolButton(self, event=None, stack=False):
    '''GUI callback to start spooling.

    NB: this is also called programatically by the start stack button.
    '''
    fn = self.tcSpoolFile.GetValue()

    if fn == '': #sanity checking
        wx.MessageBox('Please enter a series name', 'No series name given', wx.OK)
        return #bail

    if not os.path.exists(self.dirname):
        os.makedirs(self.dirname)

    if not self.dirname[-1] == os.sep:
        self.dirname += os.sep

    if (fn + '.h5') in os.listdir(self.dirname): #check to see if data with the same name exists
        ans = wx.MessageBox('A series with the same name already exists', 'Error', wx.OK)
        #overwriting doesn't work ... so just bail
        #increment the series counter first, though, so hopefully we don't
        #get the same error on the next try
        self.seriesCounter += 1
        self.seriesName = self._GenSeriesName()
        self.tcSpoolFile.SetValue(self.seriesName)
        #if ans == wx.NO:
        return #bail

    if self.cbCompress.GetValue():
        compLevel = 2
    else:
        compLevel = 0

    if stack:
        protocol = self.protocolZ
        print(protocol)
    else:
        protocol = self.protocol

    #BUGFIX: preflight-check the protocol we are actually going to run
    #(protocolZ when starting a stack), not unconditionally self.protocol
    if not preflight.ShowPreflightResults(self, protocol.PreflightCheck()):
        return #bail if we failed the pre flight check, and the user didn't choose to continue

    spoolType = self.rbQueue.GetStringSelection()
    if spoolType == 'Queue':
        self.queueName = getRelFilename(self.dirname + fn + '.h5')
        self.spooler = QueueSpooler.Spooler(self.scope, self.queueName, self.scope.pa, protocol, self, complevel=compLevel)
        self.bAnalyse.Enable(True)
    elif spoolType == 'HTTP':
        self.queueName = self.dirname + fn + '.h5'
        self.spooler = HTTPSpooler.Spooler(self.scope, self.queueName, self.scope.pa, protocol, self, complevel=compLevel)
        self.bAnalyse.Enable(True)
    else:
        self.spooler = HDFSpooler.Spooler(self.scope, self.dirname + fn + '.h5', self.scope.pa, protocol, self, complevel=compLevel)

    #update the GUI to reflect the fact that we are now spooling
    self.bStartSpool.Enable(False)
    self.bStartStack.Enable(False)
    self.bStopSpooling.Enable(True)
    self.stSpoolingTo.Enable(True)
    self.stNImages.Enable(True)
    self.stSpoolingTo.SetLabel('Spooling to ' + fn)
    self.stNImages.SetLabel('0 images spooled in 0 minutes')

    if sampInf:
        sampleInformation.getSampleData(self, self.spooler.md)
def LoadTiff(self, filename):
    '''Load a TIFF (or LSM) stack.

    Wraps the file in a TiffDataSource behind a BufferedDataSource (up to 50
    buffered slices), then tries to assemble a multi-channel ListWrap using,
    in order of preference: 'ChannelFiles' metadata (one file per channel),
    'ChannelNames' metadata (one slice per channel), or the channel count
    embedded in .lsm files.
    '''
    from PYME.Analysis.DataSources import TiffDataSource

    mdfn = self.FindAndParseMetadata(filename)

    self.dataSource = TiffDataSource.DataSource(filename, None)
    self.dataSource = BufferedDataSource.DataSource(
        self.dataSource, min(self.dataSource.getNumSlices(), 50))
    self.data = self.dataSource #this will get replaced with a wrapped version

    #if we have a multi channel data set, try and pull in all the channels
    if 'ChannelFiles' in self.mdh.getEntryNames():
        try:
            from PYME.DSView.dataWrap import ListWrap
            #pull in all channels
            chans = []
            for cf in self.mdh.getEntry('ChannelFiles'):
                cfn = os.path.join(os.path.split(filename)[0], cf)

                ds = TiffDataSource.DataSource(cfn, None)
                ds = BufferedDataSource.DataSource(
                    ds, min(ds.getNumSlices(), 50))

                chans.append(ds)

            self.data = ListWrap(
                chans) #this will get replaced with a wrapped version

            self.filename = mdfn
        except Exception: #best effort - fall back to the single-channel data
            #(was a bare except, which would also catch KeyboardInterrupt)
            pass

    elif 'ChannelNames' in self.mdh.getEntryNames() and len(
            self.mdh['ChannelNames']) == self.data.getNumSlices():
        from PYME.DSView.dataWrap import ListWrap
        chans = [
            numpy.atleast_3d(self.data.getSlice(i))
            for i in range(len(self.mdh['ChannelNames']))
        ]
        self.data = ListWrap(chans)

    elif filename.endswith(
            '.lsm') and 'LSM.images_number_channels' in self.mdh.keys(
            ) and self.mdh['LSM.images_number_channels'] > 1:
        from PYME.DSView.dataWrap import ListWrap
        nChans = self.mdh['LSM.images_number_channels']

        chans = []
        for n in range(nChans):
            ds = TiffDataSource.DataSource(filename, None, n)
            ds = BufferedDataSource.DataSource(ds, min(ds.getNumSlices(), 50))

            chans.append(ds)

        self.data = ListWrap(chans)

    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    self.mode = 'default'