def LoadHTTP(self, filename):
    """Load PYME's semi-custom HDF5 image data format over HTTP.

    All of the heavy lifting is delegated to the HTTPDataSource class;
    this method just wires up background subtraction and metadata.
    """
    import tables
    from PYME.Analysis.DataSources import HTTPDataSource, BGSDataSource

    # Open the remote series.
    self.dataSource = HTTPDataSource.DataSource(filename)

    # Chain on a background-subtraction source so the GUI can subtract
    # background the same way the analysis pipeline does.
    # (self.data later gets replaced with a wrapped version.)
    self.data = BGSDataSource.DataSource(self.dataSource)

    # Start from TIRF defaults, then overlay whatever metadata the
    # source actually provides.
    self.mdh = MetaData.TIRFDefault
    self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

    # Estimate any parameters still missing from the data itself.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # Name used when batch analysis is run on this series.
    self.seriesName = filename

    self.events = self.dataSource.getEvents()
def LoadQueue(self, filename):
    """Load data from a remote PYME.ParallelTasks.HDFTaskQueue queue using Pyro.

    Parameters
    ----------
    filename : str
        the name of the queue (prefixed with 'QUEUE://')
    """
    import Pyro.core
    from PYME.Analysis.DataSources import TQDataSource
    from PYME.misc.computerName import GetComputerName
    compName = GetComputerName()

    # FIX: compare with None using identity ('is'), not equality ('==').
    if self.queueURI is None:
        # No explicit URI - look the queue up via the Pyro name server
        # using the local computer name.
        taskQueueName = 'TaskQueues.%s' % compName
        self.tq = Pyro.core.getProxyForURI('PYRONAME://' + taskQueueName)
    else:
        self.tq = Pyro.core.getProxyForURI(self.queueURI)

    # Strip the 'QUEUE://' scheme prefix to obtain the series name.
    self.seriesName = filename[len('QUEUE://'):]

    self.dataSource = TQDataSource.DataSource(self.seriesName, self.tq)
    self.data = self.dataSource  # this will get replaced with a wrapped version

    self.mdh = MetaDataHandler.QueueMDHandler(self.tq, self.seriesName)
    # Estimate any parameters missing from the queue metadata.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    #self.timer.WantNotification.append(self.dsRefresh)
    self.events = self.dataSource.getEvents()
def _loadHTTP(self, filename):
    """Load PYME's semi-custom HDF5 image data format from an HTTP source.

    The HTTPDataSource class does the heavy lifting; here we only chain
    the standard background-subtraction wrapper and metadata handling.
    """
    import tables
    from PYME.IO.DataSources import HTTPDataSource, BGSDataSource

    # Open the remote series.
    self.dataSource = HTTPDataSource.DataSource(filename)

    # Chain a background-subtraction source so the GUI can subtract
    # background the same way the analysis pipeline does.
    # (self.data later gets replaced with a wrapped version.)
    self.data = BGSDataSource.DataSource(self.dataSource)

    # TIRF defaults first, then overlay the metadata the source provides.
    self.mdh = MetaData.TIRFDefault
    self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

    # Estimate any parameters still missing from the data itself.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # Name used for subsequent batch analysis of this series.
    self.seriesName = filename

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def _loadClusterPZF(self, filename):
    """Load a series stored as PZF frames on the PYME cluster.

    ClusterPZFDataSource does the heavy lifting; this method only wires
    up background subtraction and metadata handling on top of it.
    """
    from PYME.IO.DataSources import ClusterPZFDataSource, BGSDataSource

    self.dataSource = ClusterPZFDataSource.DataSource(filename)

    # Chain a background-subtraction source so the GUI can subtract
    # background the same way the analysis pipeline does.
    # (self.data later gets replaced with a wrapped version.)
    self.data = BGSDataSource.DataSource(self.dataSource)

    # TIRF defaults first, then overlay the metadata the source provides.
    self.mdh = MetaData.TIRFDefault
    self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

    # Estimate any parameters still missing from the data itself.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # Name used for subsequent batch analysis of this series.
    self.seriesName = filename

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def _loadh5(self, filename):
    """Load PYME's semi-custom HDF5 image data format.

    Offloads the heavy lifting to the HDFDataSource class, then chains on
    background subtraction, metadata handling, and a (currently disabled)
    lookup of any previously performed analysis.

    Parameters
    ----------
    filename : str
        Path of the .h5 series to open.
    """
    import tables
    from PYME.IO.DataSources import HDFDataSource, BGSDataSource
    from PYME.IO import tabular

    # Open the hdf5 file.
    self.dataSource = HDFDataSource.DataSource(filename, None)

    # Chain on a background subtraction data source, so we can easily do
    # background subtraction in the GUI the same way as in the analysis.
    self.data = BGSDataSource.DataSource(self.dataSource)  # this will get replaced with a wrapped version

    if 'MetaData' in self.dataSource.h5File.root:  # should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
    else:
        # No metadata in the file - warn and press on with TIRF defaults.
        self.mdh = MetaData.TIRFDefault
        import wx
        wx.MessageBox("Carrying on with defaults - no gaurantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        print("ERROR: No metadata fond in file ... Carrying on with defaults - no gaurantees it'll work well")

    # Attempt to estimate any missing parameters from the data itself.
    try:
        MetaData.fillInBlanks(self.mdh, self.dataSource)
    except Exception:
        # FIX: was a bare 'except:', which would also swallow
        # KeyboardInterrupt/SystemExit; keep the best-effort behaviour
        # but only trap ordinary exceptions.
        logger.exception('Error attempting to populate missing metadata')

    # Calculate the name to use when we do batch analysis on this series.
    self.seriesName = getRelFilename(filename)

    # Try and find a previously performed analysis (deliberately disabled).
    fns = filename.split(os.path.sep)
    cand = os.path.sep.join(fns[:-2] + ['analysis', ] + fns[-2:]) + 'r'
    print(cand)
    if False:  # os.path.exists(cand):
        h5Results = tables.open_file(cand)
        if 'FitResults' in dir(h5Results.root):
            self.fitResults = h5Results.root.FitResults[:]
            self.resultsSource = tabular.H5RSource(h5Results)
            self.resultsMdh = MetaData.TIRFDefault
            self.resultsMdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(h5Results))

    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def Loadh5(self, filename):
    """Load PYME's semi-custom HDF5 image data format.

    HDFDataSource does the heavy lifting; this method wires up background
    subtraction, metadata, and a (disabled) lookup of saved analysis.
    """
    import tables
    from PYME.Analysis.DataSources import HDFDataSource, BGSDataSource
    from PYME.Analysis.LMVis import inpFilt

    # Open the hdf5 file.
    self.dataSource = HDFDataSource.DataSource(filename, None)

    # Chain a background-subtraction source so the GUI can subtract
    # background the same way the analysis pipeline does.
    # (self.data later gets replaced with a wrapped version.)
    self.data = BGSDataSource.DataSource(self.dataSource)

    if 'MetaData' in self.dataSource.h5File.root:
        # Normal case - metadata is stored alongside the image data.
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
    else:
        # No metadata found: warn the user and press on with TIRF defaults.
        self.mdh = MetaData.TIRFDefault
        wx.MessageBox("Carrying on with defaults - no gaurantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        print("ERROR: No metadata fond in file ... Carrying on with defaults - no gaurantees it'll work well")

    # Estimate any parameters still missing from the data itself.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    # Name used when running batch analysis on this series.
    from PYME.ParallelTasks.relativeFiles import getRelFilename
    self.seriesName = getRelFilename(filename)

    # Look for a previously performed analysis next to the data
    # (lookup is deliberately disabled below).
    pathParts = filename.split(os.path.sep)
    cand = os.path.sep.join(pathParts[:-2] + ['analysis'] + pathParts[-2:]) + 'r'
    print(cand)
    if False:  # os.path.exists(cand):
        h5Results = tables.openFile(cand)
        if 'FitResults' in dir(h5Results.root):
            self.fitResults = h5Results.root.FitResults[:]
            self.resultsSource = inpFilt.h5rSource(h5Results)
            self.resultsMdh = MetaData.TIRFDefault
            self.resultsMdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(h5Results))

    self.events = self.dataSource.getEvents()
def _loadQueue(self, filename):
    """Load data from a remote PYME.ParallelTasks.HDFTaskQueue queue using Pyro.

    Parameters:
    -----------
    filename : string
        the name of the queue (prefixed with 'QUEUE://')
    """
    import Pyro.core
    from PYME.IO.DataSources import TQDataSource
    from PYME.misc.computerName import GetComputerName
    compName = GetComputerName()

    if self.queueURI is None:
        # No explicit URI - look the queue up by computer name, preferring
        # zeroconf and falling back to the Pyro name server.
        taskQueueName = 'TaskQueues.%s' % compName
        try:
            from PYME.misc import pyme_zeroconf
            ns = pyme_zeroconf.getNS()
            URI = ns.resolve(taskQueueName)
        except Exception:
            # FIX: was a bare 'except:' - keep the best-effort fallback but
            # don't trap KeyboardInterrupt/SystemExit as well.
            URI = 'PYRONAME://' + taskQueueName

        self.tq = Pyro.core.getProxyForURI(URI)
    else:
        self.tq = Pyro.core.getProxyForURI(self.queueURI)

    # Strip the 'QUEUE://' scheme prefix to obtain the series name.
    self.seriesName = filename[len('QUEUE://'):]

    self.dataSource = TQDataSource.DataSource(self.seriesName, self.tq)
    self.data = self.dataSource  # this will get replaced with a wrapped version

    self.mdh = MetaDataHandler.QueueMDHandler(self.tq, self.seriesName)
    # Estimate any parameters missing from the queue metadata.
    MetaData.fillInBlanks(self.mdh, self.dataSource)

    #self.timer.WantNotification.append(self.dsRefresh)
    self.events = self.dataSource.getEvents()
    self.mode = 'LM'
def __init__(self, dsviewer):
    """Wire the LM-analysis GUI panel into a dsviewer window.

    Grabs references to the viewer's image/view/display objects, ensures
    the image has the metadata the analysis needs, builds the "Set
    defaults for" menu and background-subtraction toggle, and registers
    the panel/update/status hooks with the viewer.

    Parameters
    ----------
    dsviewer :
        The hosting data-series viewer window; expected to expose
        ``image``, ``view``, ``do``, ``menubar``, ``view_menu``,
        ``paneHooks``, ``updateHooks`` and ``statusHooks``
        (and optionally ``tq``).
    """
    self.dsviewer = dsviewer
    # Reuse the viewer's task queue connection if it already has one.
    if 'tq' in dir(dsviewer):
        self.tq = dsviewer.tq
    else:
        self.tq = None

    self.image = dsviewer.image
    self.view = dsviewer.view
    self.do = dsviewer.do

    #this should only occur for files types which we weren't expecting to process
    #as LM data (eg tiffs)
    if not 'EstimatedLaserOnFrameNo' in self.image.mdh.getEntryNames():
        from PYME.Analysis import MetaData
        # Estimate the missing acquisition parameters from the data itself.
        MetaData.fillInBlanks(self.image.mdh, self.image.dataSource)

    # Pick up fit results / results metadata if the loader already found them.
    if 'fitResults' in dir(self.image):
        self.fitResults = self.image.fitResults
    else:
        self.fitResults = []
    if 'resultsMdh' in dir(self.image):
        self.resultsMdh = self.image.resultsMdh

    # Build the "Set defaults for" menu of analysis presets.
    mTasks = wx.Menu()
    TASKS_STANDARD_2D = wx.NewId()
    TASKS_CALIBRATE_SPLITTER = wx.NewId()
    TASKS_2D_SPLITTER = wx.NewId()
    TASKS_3D = wx.NewId()
    TASKS_3D_SPLITTER = wx.NewId()
    TASKS_PRI = wx.NewId()
    mTasks.Append(TASKS_STANDARD_2D, "Normal 2D analysis", "", wx.ITEM_NORMAL)
    mTasks.Append(TASKS_CALIBRATE_SPLITTER, "Calibrating the splitter", "", wx.ITEM_NORMAL)
    mTasks.Append(TASKS_2D_SPLITTER, "2D with splitter", "", wx.ITEM_NORMAL)
    mTasks.Append(TASKS_3D, "3D analysis", "", wx.ITEM_NORMAL)
    mTasks.Append(TASKS_3D_SPLITTER, "3D with splitter", "", wx.ITEM_NORMAL)
    mTasks.Append(TASKS_PRI, "PRI", "", wx.ITEM_NORMAL)
    self.dsviewer.menubar.Append(mTasks, "Set defaults for")

    # Bind the menu entries to their preset handlers (legacy-style binding).
    wx.EVT_MENU(self.dsviewer, TASKS_CALIBRATE_SPLITTER, self.OnCalibrateSplitter)
    wx.EVT_MENU(self.dsviewer, TASKS_STANDARD_2D, self.OnStandard2D)
    wx.EVT_MENU(self.dsviewer, TASKS_2D_SPLITTER, self.OnSpitter2D)
    wx.EVT_MENU(self.dsviewer, TASKS_3D, self.OnStandard3D)
    wx.EVT_MENU(self.dsviewer, TASKS_3D_SPLITTER, self.OnSpliter3D)
    wx.EVT_MENU(self.dsviewer, TASKS_PRI, self.OnPRI3D)

    # Checkable view-menu item toggling background subtraction.
    BG_SUBTRACT = wx.NewId()
    self.dsviewer.view_menu.AppendCheckItem(BG_SUBTRACT, 'Subtract Background')
    wx.EVT_MENU(self.dsviewer, BG_SUBTRACT, self.OnToggleBackground)

    #a timer object to update for us
    self.timer = mytimer()
    self.timer.Start(10000)

    # Display mode for the analysis overlay.
    self.analDispMode = 'z'

    # Progress counters updated as analysis results come in.
    self.numAnalysed = 0
    self.numEvents = 0

    dsviewer.pipeline = pipeline.Pipeline()
    self.ds = None

    # Register our GUI panels and update/status callbacks with the viewer.
    dsviewer.paneHooks.append(self.GenPointFindingPanel)
    dsviewer.paneHooks.append(self.GenAnalysisPanel)
    dsviewer.paneHooks.append(self.GenFitStatusPanel)
    dsviewer.updateHooks.append(self.update)
    dsviewer.statusHooks.append(self.GetStatusText)

    # Position the z-cursor at the first frame containing real data.
    if 'Protocol.DataStartsAt' in self.image.mdh.getEntryNames():
        self.do.zp = self.image.mdh.getEntry('Protocol.DataStartsAt')
    else:
        self.do.zp = self.image.mdh.getEntry('EstimatedLaserOnFrameNo')