def _loadHTTP(self, filename):
    """Load an image series served over HTTP (e.g. by a PYME cluster node).

    Offloads the hard work to the HTTPDataSource class, then chains a
    background-subtraction data source on top so the GUI can do background
    subtraction the same way the analysis pipeline does.

    Parameters
    ----------
    filename : str
        URL / path understood by HTTPDataSource.

    Side effects: sets self.dataSource, self.data, self.mdh,
    self.seriesName, self.events and self.mode.
    """
    from PYME.IO.DataSources import HTTPDataSource, BGSDataSource

    # open the remote series
    self.dataSource = HTTPDataSource.DataSource(filename)

    # chain on a background subtraction data source, so we can easily do
    # background subtraction in the GUI the same way as in the analysis
    self.data = BGSDataSource.DataSource(self.dataSource)  # this will get replaced with a wrapped version

    # start from TIRF defaults and overwrite with whatever the source provides
    self.mdh = MetaData.TIRFDefault
    self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

    # attempt to estimate any missing parameters from the data itself
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # the name to use when we do batch analysis on this series
    self.seriesName = filename

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def _loadClusterPZF(self, filename):
    """Load a PZF-format image series stored on the PYME cluster.

    Offloads the hard work to the ClusterPZFDataSource class, then chains a
    background-subtraction data source on top so the GUI can do background
    subtraction the same way the analysis pipeline does.

    Parameters
    ----------
    filename : str
        Cluster URI / path understood by ClusterPZFDataSource.

    Side effects: sets self.dataSource, self.data, self.mdh,
    self.seriesName, self.events and self.mode.
    """
    from PYME.IO.DataSources import ClusterPZFDataSource, BGSDataSource

    self.dataSource = ClusterPZFDataSource.DataSource(filename)

    # chain on a background subtraction data source, so we can easily do
    # background subtraction in the GUI the same way as in the analysis
    self.data = BGSDataSource.DataSource(self.dataSource)  # this will get replaced with a wrapped version

    # start from TIRF defaults and overwrite with whatever the source provides
    self.mdh = MetaData.TIRFDefault
    self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

    # attempt to estimate any missing parameters from the data itself
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # the name to use when we do batch analysis on this series
    self.seriesName = filename

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def _loadh5(self, filename):
    """Load PYME's semi-custom HDF5 image data format.

    Offloads all the hard work to the HDFDataSource class, chains a
    background-subtraction data source on top for GUI use, and pulls
    metadata from the file (falling back to TIRF defaults, with a GUI
    warning, if none is present).

    Parameters
    ----------
    filename : str
        Path to the .h5 file.

    Side effects: sets self.dataSource, self.data, self.mdh,
    self.seriesName, self.events and self.mode. May pop up a wx
    message box when metadata is missing.
    """
    from PYME.IO.DataSources import HDFDataSource, BGSDataSource

    self.dataSource = HDFDataSource.DataSource(filename, None)

    # chain on a background subtraction data source, so we can easily do
    # background subtraction in the GUI the same way as in the analysis
    self.data = BGSDataSource.DataSource(self.dataSource)  # this will get replaced with a wrapped version

    if 'MetaData' in self.dataSource.h5File.root:  # should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
    else:
        # no metadata in the file - warn the user and carry on with defaults
        self.mdh = MetaData.TIRFDefault
        import wx
        wx.MessageBox(
            "Carrying on with defaults - no gaurantees it'll work well",
            'ERROR: No metadata found in file ...', wx.OK)
        print(
            "ERROR: No metadata fond in file ... Carrying on with defaults - no gaurantees it'll work well"
        )

    # attempt to estimate any missing parameters from the data itself
    try:
        MetaData.fillInBlanks(self.mdh, self.dataSource)
    except Exception:  # narrowed from a bare except: don't swallow KeyboardInterrupt/SystemExit
        logger.exception('Error attempting to populate missing metadata')

    # calculate the name to use when we do batch analysis on this series
    self.seriesName = getRelFilename(filename)

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def _loadTiff(self, filename):
    """Load a TIFF (or LSM) image series.

    Wraps the raw TiffDataSource in a BufferedDataSource, optionally adds a
    background-subtraction layer for long (likely localization) series, and
    tries three strategies to assemble a multi-channel ListWrap:
    per-channel files ('ChannelFiles' metadata), per-slice channels
    ('ChannelNames' metadata), or multi-channel LSM files.  Finally picks a
    display mode ('psf', 'LM' or 'default') from the metadata / length.
    """
    #from PYME.IO.FileUtils import readTiff
    from PYME.IO.DataSources import TiffDataSource, BGSDataSource

    # parse any sidecar / embedded metadata first; populates self.mdh
    mdfn = self._findAndParseMetadata(filename)

    self.dataSource = TiffDataSource.DataSource(filename, None)
    print(self.dataSource.shape)
    # buffer up to 50 slices to avoid re-reading from disk on every access
    self.dataSource = BufferedDataSource.DataSource(
        self.dataSource, min(self.dataSource.getNumSlices(), 50))
    self.data = self.dataSource  # this will get replaced with a wrapped version

    if self.dataSource.getNumSlices() > 500:  # this is likely to be a localization data set
        # background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(
            self.dataSource)  # this will get replaced with a wrapped version

    print(self.data.shape)

    # if we have a multi channel data set, try and pull in all the channels
    # NOTE(review): this fires only when the channel count does NOT match the
    # 4th data dimension - looks possibly inverted; confirm intended logic.
    if 'ChannelFiles' in self.mdh.getEntryNames() and not len(
            self.mdh['ChannelFiles']) == self.data.shape[3]:
        try:
            from PYME.IO.dataWrap import ListWrap
            # pull in all channels, one buffered data source per channel file
            chans = []
            for cf in self.mdh.getEntry('ChannelFiles'):
                # channel files are resolved relative to the main file's directory
                cfn = os.path.join(os.path.split(filename)[0], cf)
                ds = TiffDataSource.DataSource(cfn, None)
                ds = BufferedDataSource.DataSource(ds, min(ds.getNumSlices(), 50))
                chans.append(ds)
            self.data = ListWrap(chans)  # this will get replaced with a wrapped version
            self.filename = mdfn
        except:
            # best-effort: fall back to the single-file data if any channel fails
            # NOTE(review): bare except silently hides all errors here
            pass
    elif 'ChannelNames' in self.mdh.getEntryNames() and len(
            self.mdh['ChannelNames']) == self.data.getNumSlices():
        # one slice per named channel - re-wrap slices as channels
        from PYME.IO.dataWrap import ListWrap
        chans = [
            numpy.atleast_3d(self.data.getSlice(i))
            for i in range(len(self.mdh['ChannelNames']))
        ]
        self.data = ListWrap(chans)
    elif filename.endswith('.lsm') and 'LSM.images_number_channels' in self.mdh.keys(
    ) and self.mdh['LSM.images_number_channels'] > 1:
        # multi-channel LSM: open the same file once per channel index
        from PYME.IO.dataWrap import ListWrap
        nChans = self.mdh['LSM.images_number_channels']
        chans = []
        for n in range(nChans):
            ds = TiffDataSource.DataSource(filename, None, n)
            ds = BufferedDataSource.DataSource(ds, min(ds.getNumSlices(), 50))
            chans.append(ds)
        self.data = ListWrap(chans)

    #from PYME.ParallelTasks.relativeFiles import getRelFilename
    # the name to use when we do batch analysis on this series
    self.seriesName = getRelFilename(filename)

    # choose a display/processing mode from the metadata / series length
    self.mode = 'default'
    if self.mdh.getOrDefault('ImageType', '') == 'PSF':
        self.mode = 'psf'
    elif self.dataSource.getNumSlices() > 5000:  # likely to want to localize this
        self.mode = 'LM'