Example #1
    def _loadClusterPZF(self, filename):
        """Load PYMEs semi-custom HDF5 image data format. Offloads all the
        hard work to the HDFDataSource class"""

        from PYME.IO.DataSources import ClusterPZFDataSource, BGSDataSource

        self.dataSource = ClusterPZFDataSource.DataSource(filename)
        #chain on a background subtraction data source, so we can easily do
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(
            self.dataSource)  #this will get replaced with a wrapped version

        #try: #should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(self.dataSource.getMetadata())

        #attempt to estimate any missing parameters from the data itself
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #calculate the name to use when we do batch analysis on this
        #from PYME.ParallelTasks.relativeFiles import getRelFilename
        self.seriesName = filename

        self.events = self.dataSource.getEvents()

        self.mode = 'LM'
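
A hedged sketch of how this loader is typically reached: in current PYME these _load* methods live on the image-container class in PYME.IO.image, which dispatches on the filename scheme. The cluster URI below is hypothetical:

# hedged usage sketch -- assumes PYME.IO.image.ImageStack routes cluster
# URIs to _loadClusterPZF; the series path below is hypothetical
from PYME.IO.image import ImageStack
im = ImageStack(filename='PYME-CLUSTER://cluster/example_folder/series')
print(im.mdh.getEntry('voxelsize.x'), im.mode)  # metadata populated by the loader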
Example #2
    def LoadQueue(self, filename):
        '''Load data from a remote PYME.ParallelTasks.HDFTaskQueue queue using
        Pyro.
        
        Parameters:

            filename    the name of the queue         
        
        '''
        import Pyro.core
        from PYME.Analysis.DataSources import TQDataSource
        from PYME.misc.computerName import GetComputerName
        compName = GetComputerName()

        if self.queueURI is None:
            #do a lookup
            taskQueueName = 'TaskQueues.%s' % compName
            self.tq = Pyro.core.getProxyForURI('PYRONAME://' + taskQueueName)
        else:
            self.tq = Pyro.core.getProxyForURI(self.queueURI)

        self.seriesName = filename[len('QUEUE://'):]

        self.dataSource = TQDataSource.DataSource(self.seriesName, self.tq)
        self.data = self.dataSource  #this will get replaced with a wrapped version

        self.mdh = MetaDataHandler.QueueMDHandler(self.tq, self.seriesName)
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #self.timer.WantNotification.append(self.dsRefresh)

        self.events = self.dataSource.getEvents()
Example #3
File: image.py  Project: RuralCat/CLipPYME
    def LoadQueue(self, filename):
        '''Load data from a remote PYME.ParallelTasks.HDFTaskQueue queue using
        Pyro.
        
        Parameters:

            filename    the name of the queue         
        
        '''
        import Pyro.core
        from PYME.Analysis.DataSources import TQDataSource
        from PYME.misc.computerName import GetComputerName
        compName = GetComputerName()

        if self.queueURI is None:
            #do a lookup
            taskQueueName = 'TaskQueues.%s' % compName
            self.tq = Pyro.core.getProxyForURI('PYRONAME://' + taskQueueName)
        else:
            self.tq = Pyro.core.getProxyForURI(self.queueURI)

        self.seriesName = filename[len('QUEUE://'):]

        self.dataSource = TQDataSource.DataSource(self.seriesName, self.tq)
        self.data = self.dataSource #this will get replaced with a wrapped version

        self.mdh = MetaDataHandler.QueueMDHandler(self.tq, self.seriesName)
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #self.timer.WantNotification.append(self.dsRefresh)

        self.events = self.dataSource.getEvents()
Example #4
    def __init__(self,
                 spooler,
                 seriesName,
                 analysisMetadata,
                 resultsFilename=None,
                 startAt=0,
                 serverfilter=clusterIO.local_serverfilter,
                 **kwargs):
        # TODO - reduce duplication of `LocalisationRule.__init__()` and `LocalisationRule._setup()`
        from PYME.IO import MetaDataHandler
        from PYME.Analysis import MetaData
        from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
        from PYME.IO import unifiedIO

        self.spooler = spooler

        if resultsFilename is None:
            resultsFilename = genClusterResultFileName(seriesName)

        resultsFilename = verify_cluster_results_filename(resultsFilename)
        logger.info('Results file: ' + resultsFilename)

        resultsMdh = MetaDataHandler.DictMDHandler()
        # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
        resultsMdh.update(self.spooler.md)
        resultsMdh.update(analysisMetadata)
        resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault(
            'EstimatedLaserOnFrameNo',
            resultsMdh.getOrDefault('Analysis.StartAt', 0))
        MetaData.fixEMGain(resultsMdh)

        self._setup(seriesName, resultsMdh, resultsFilename, startAt,
                    serverfilter)

        Rule.__init__(self, **kwargs)
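
The EstimatedLaserOnFrameNo logic above resolves, in order: an explicit value already present in the merged metadata, then Analysis.StartAt, then 0. A minimal illustration with the same handler class (values are hypothetical):

from PYME.IO import MetaDataHandler

mdh = MetaDataHandler.DictMDHandler()
mdh['Analysis.StartAt'] = 30  # hypothetical analysis start frame
start = mdh.getOrDefault('EstimatedLaserOnFrameNo',
                         mdh.getOrDefault('Analysis.StartAt', 0))
# start == 30: with no explicit estimate, it falls back to Analysis.StartAt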
Example #5
    def _loadHTTP(self, filename):
        """Load PYMEs semi-custom HDF5 image data format. Offloads all the
        hard work to the HDFDataSource class"""
        import tables
        from PYME.IO.DataSources import HTTPDataSource, BGSDataSource
        #from PYME.LMVis import inpFilt

        #open hdf5 file
        self.dataSource = HTTPDataSource.DataSource(filename)
        #chain on a background subtraction data source, so we can easily do
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(
            self.dataSource)  #this will get replaced with a wrapped version

        #try: #should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(self.dataSource.getMetadata())
        #except:
        #    self.mdh = MetaData.TIRFDefault
        #    wx.MessageBox("Carrying on with defaults - no guarantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        #    print("ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well")

        #attempt to estimate any missing parameters from the data itself
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #calculate the name to use when we do batch analysis on this
        #from PYME.ParallelTasks.relativeFiles import getRelFilename
        self.seriesName = filename

        self.events = self.dataSource.getEvents()

        self.mode = 'LM'
Example #6
File: image.py  Project: RuralCat/CLipPYME
    def LoadHTTP(self, filename):
        '''Load PYME's semi-custom HDF5 image data format over HTTP. Offloads
        all the hard work to the HTTPDataSource class'''
        import tables
        from PYME.Analysis.DataSources import HTTPDataSource, BGSDataSource
        #from PYME.Analysis.LMVis import inpFilt
        
        #open hdf5 file
        self.dataSource = HTTPDataSource.DataSource(filename)
        #chain on a background subtraction data source, so we can easily do 
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(self.dataSource) #this will get replaced with a wrapped version

        #try: #should be true the whole time
        self.mdh = MetaData.TIRFDefault
        self.mdh.copyEntriesFrom(self.dataSource.getMetadata())
        #except:
        #    self.mdh = MetaData.TIRFDefault
        #    wx.MessageBox("Carrying on with defaults - no guarantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
        #    print("ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well")

        #attempt to estimate any missing parameters from the data itself        
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #calculate the name to use when we do batch analysis on this        
        #from PYME.ParallelTasks.relativeFiles import getRelFilename
        self.seriesName = filename

        self.events = self.dataSource.getEvents()
Example #7
    def _loadh5(self, filename):
        """Load PYMEs semi-custom HDF5 image data format. Offloads all the
        hard work to the HDFDataSource class"""
        import tables
        from PYME.IO.DataSources import HDFDataSource, BGSDataSource
        from PYME.IO import tabular

        self.dataSource = HDFDataSource.DataSource(filename, None)
        #chain on a background subtraction data source, so we can easily do
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(
            self.dataSource)  #this will get replaced with a wrapped version

        if 'MetaData' in self.dataSource.h5File.root:  #should be true the whole time
            self.mdh = MetaData.TIRFDefault
            self.mdh.copyEntriesFrom(
                MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
        else:
            self.mdh = MetaData.TIRFDefault
            import wx
            wx.MessageBox(
                "Carrying on with defaults - no guarantees it'll work well",
                'ERROR: No metadata found in file ...', wx.OK)
            print(
                "ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well"
            )

        #attempt to estimate any missing parameters from the data itself
        try:
            MetaData.fillInBlanks(self.mdh, self.dataSource)
        except Exception:
            logger.exception('Error attempting to populate missing metadata')

        #calculate the name to use when we do batch analysis on this
        #from PYME.IO.FileUtils.nameUtils import getRelFilename
        self.seriesName = getRelFilename(filename)

        #try and find a previously performed analysis
        fns = filename.split(os.path.sep)
        cand = os.path.sep.join(fns[:-2] + [
            'analysis',
        ] + fns[-2:]) + 'r'
        print(cand)
        if False:  #os.path.exists(cand):
            h5Results = tables.open_file(cand)

            if 'FitResults' in dir(h5Results.root):
                self.fitResults = h5Results.root.FitResults[:]
                self.resultsSource = tabular.H5RSource(h5Results)

                self.resultsMdh = MetaData.TIRFDefault
                self.resultsMdh.copyEntriesFrom(
                    MetaDataHandler.HDFMDHandler(h5Results))

        self.events = self.dataSource.getEvents()

        self.mode = 'LM'
Example #8
    def __init__(self,
                 seriesName,
                 analysisMetadata,
                 resultsFilename=None,
                 startAt=0,
                 dataSourceModule=None,
                 serverfilter=clusterIO.local_serverfilter,
                 **kwargs):
        from PYME.IO import MetaDataHandler
        from PYME.Analysis import MetaData
        from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
        from PYME.IO import unifiedIO

        unifiedIO.assert_uri_ok(seriesName)

        if resultsFilename is None:
            resultsFilename = genClusterResultFileName(seriesName)

        resultsFilename = verify_cluster_results_filename(resultsFilename)
        logger.info('Results file: ' + resultsFilename)

        resultsMdh = MetaDataHandler.NestedClassMDHandler()
        # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
        resultsMdh.update(
            json.loads(unifiedIO.read(seriesName + '/metadata.json')))
        resultsMdh.update(analysisMetadata)

        resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault(
            'EstimatedLaserOnFrameNo',
            resultsMdh.getOrDefault('Analysis.StartAt', 0))
        MetaData.fixEMGain(resultsMdh)

        self._setup(seriesName, resultsMdh, resultsFilename, startAt,
                    serverfilter)

        #load data source
        if dataSourceModule is None:
            DataSource = DataSources.getDataSourceForFilename(seriesName)
        else:
            DataSource = __import__(
                'PYME.IO.DataSources.' + dataSourceModule,
                fromlist=['PYME', 'io',
                          'DataSources']).DataSource  #import our data source

        self.ds = DataSource(seriesName)

        logger.debug('DataSource.__class__: %s' % self.ds.__class__)

        Rule.__init__(self, **kwargs)
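
For reference, the __import__ call above (with its fromlist quirk) is equivalent to the more readable importlib form; the module name here is hypothetical:

from importlib import import_module

# equivalent dynamic import of a data source class; the module name is hypothetical
DataSource = import_module('PYME.IO.DataSources.HDFDataSource').DataSource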
Example #9
def launch_localize(analysisMDH, seriesName):
    """
    Pushes an analysis task for a given series to the distributor

    Parameters
    ----------
    analysisMDH : dictionary-like
        MetaDataHandler describing the analysis tasks to launch
    seriesName : str
        cluster path, e.g. pyme-cluster:///example_folder/series
    Returns
    -------

    """
    #import logging
    import json
    #from PYME.ParallelTasks import HTTPTaskPusher
    from PYME.IO import MetaDataHandler
    from PYME.Analysis import MetaData
    from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
    from PYME.IO import unifiedIO

    unifiedIO.assert_uri_ok(seriesName)

    resultsFilename = verify_cluster_results_filename(
        genClusterResultFileName(seriesName))
    logger.info('Results file: ' + resultsFilename)

    resultsMdh = MetaDataHandler.NestedClassMDHandler()
    # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
    resultsMdh.update(json.loads(unifiedIO.read(seriesName +
                                                '/metadata.json')))
    resultsMdh.update(analysisMDH)

    resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault(
        'EstimatedLaserOnFrameNo',
        resultsMdh.getOrDefault('Analysis.StartAt', 0))
    MetaData.fixEMGain(resultsMdh)
    # resultsMdh['DataFileID'] = fileID.genDataSourceID(image.dataSource)

    # TODO - do we need to keep track of the pushers in some way? (we currently
    # rely on the fact that the pushing thread will hold a reference)
    pusher = HTTPRulePusher(dataSourceID=seriesName,
                            metadata=resultsMdh,
                            resultsFilename=resultsFilename)

    logging.debug('Queue created')
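
A hedged usage sketch; the fit module name and cluster path are illustrative, not a recommended configuration:

# hypothetical analysis settings and series path
analysisMDH = {'Analysis.FitModule': 'LatGaussFitFR',
               'Analysis.StartAt': 30}
launch_localize(analysisMDH, 'pyme-cluster:///example_folder/series')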
Example #10
def generateThumbnail(inputFile, thumbSize):
    global size
    #logging.debug('Input File: %s\n' % inputFile)
    #logging.debug('Output File: %s\n' % outputFile)
    #logging.debug('Thumb Size: %s\n' % thumbSize)

    h5f = tables.open_file(inputFile)

    dataSource = HDFDataSource.DataSource(inputFile, None)

    md = MetaData.genMetaDataFromSourceAndMDH(
        dataSource, MetaDataHandler.HDFMDHandler(h5f))

    xsize = h5f.root.ImageData.shape[1]
    ysize = h5f.root.ImageData.shape[2]

    if xsize > ysize:
        zoom = float(thumbSize) / xsize
    else:
        zoom = float(thumbSize) / ysize

    size = (int(xsize * zoom), int(ysize * zoom))

    im = h5f.root.ImageData[min(md.EstimatedLaserOnFrameNo + 10,
                                (h5f.root.ImageData.shape[0] -
                                 1)), :, :].astype('f')

    im = im.T - min(md.Camera.ADOffset, im.min())

    h5f.close()

    im = maximum(minimum(1 * (255 * im) / im.max(), 255), 0)

    return im.astype('uint8')
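
A hedged usage sketch; the input filename is hypothetical, and saving via Pillow is just one option:

thumb = generateThumbnail('series.h5', 200)  # hypothetical input file

# thumb is a 2D uint8 array, so it maps straight onto a greyscale image
from PIL import Image
Image.fromarray(thumb).save('series_thumb.png')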
Example #11
def generateThumbnail(inputFile, thumbSize):
    global size
    # logging.debug('Input File: %s\n' % inputFile)
    # logging.debug('Output File: %s\n' % outputFile)
    # logging.debug('Thumb Size: %s\n' % thumbSize)

    h5f = tables.open_file(inputFile)  # open_file replaces the deprecated openFile

    dataSource = HDFDataSource.DataSource(inputFile, None)

    md = MetaData.genMetaDataFromSourceAndMDH(dataSource, MetaDataHandler.HDFMDHandler(h5f))

    xsize = h5f.root.ImageData.shape[1]
    ysize = h5f.root.ImageData.shape[2]

    if xsize > ysize:
        zoom = float(thumbSize) / xsize
    else:
        zoom = float(thumbSize) / ysize

    size = (int(xsize * zoom), int(ysize * zoom))

    im = h5f.root.ImageData[min(md.EstimatedLaserOnFrameNo + 10, (h5f.root.ImageData.shape[0] - 1)), :, :].astype("f")

    im = im.T - min(md.Camera.ADOffset, im.min())

    h5f.close()

    im = maximum(minimum(1 * (255 * im) / im.max(), 255), 0)

    return im.astype("uint8")
Example #12
def convertFile(pathToData,
                outFile,
                frameSize=[256, 256],
                pixelsize=None,
                complib='zlib',
                complevel=9):

    #seriesName = pathToData.split(os.sep)[-2]

    #fnl = os.listdir(pathToData)
    #fnl2 = [pathToData + f for f in fnl]

    #f1 = read_kdf.ReadKdfData(pathToData).squeeze()

    xSize, ySize = frameSize

    f1 = open(pathToData, 'rb')

    #detect file length
    f1.seek(0, 2)  #seek to end
    fLength = f1.tell()
    f1.seek(0)  #back to beginning

    nFrames = fLength // (2 * xSize * ySize)  #integer frame count: 2 bytes per pixel

    outF = tables.open_file(outFile, 'w')

    filt = tables.Filters(complevel, complib, shuffle=True)

    imageData = outF.create_earray(outF.root,
                                   'ImageData',
                                   tables.UInt16Atom(), (0, xSize, ySize),
                                   filters=filt,
                                   expectedrows=nFrames)

    for i in range(nFrames):
        d1 = numpy.fromfile(f1, '>u2', xSize * ySize) >> 4
        imageData.append(d1.reshape(1, xSize, ySize))
        if i % 100 == 0:
            print(('%d of %d frames' % (i, nFrames)))

    f1.close()

    hdh = MetaData.HDFMDHandler(outF, MetaData.TIRFDefault)

    if pixelsize is not None:
        hdh.setEntry('voxelsize.x', pixelsize)
        hdh.setEntry('voxelsize.y', pixelsize)

    outEvents = outF.create_table(outF.root,
                                  'Events',
                                  SpoolEvent,
                                  filters=tables.Filters(complevel=5,
                                                         shuffle=True))

    outF.flush()
    outF.close()
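
A hedged usage sketch for the raw-frame converter above; paths, frame geometry and pixel size are hypothetical (going by the nm-to-um conversion in Example #16, the voxelsize entries are in micrometres):

# hypothetical paths; 512x512 16-bit frames, 0.07 um pixels
convertFile('raw_frames.dat', 'converted.h5',
            frameSize=[512, 512], pixelsize=0.07)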
Example #13
File: image.py  Project: RuralCat/CLipPYME
    def Loadh5(self, filename):
        '''Load PYME's semi-custom HDF5 image data format. Offloads all the
        hard work to the HDFDataSource class'''
        import tables
        from PYME.Analysis.DataSources import HDFDataSource, BGSDataSource
        from PYME.Analysis.LMVis import inpFilt
        
        #open hdf5 file
        self.dataSource = HDFDataSource.DataSource(filename, None)
        #chain on a background subtraction data source, so we can easily do 
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(self.dataSource) #this will get replaced with a wrapped version

        if 'MetaData' in self.dataSource.h5File.root: #should be true the whole time
            self.mdh = MetaData.TIRFDefault
            self.mdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
        else:
            self.mdh = MetaData.TIRFDefault
            wx.MessageBox("Carrying on with defaults - no guarantees it'll work well", 'ERROR: No metadata found in file ...', wx.OK)
            print("ERROR: No metadata found in file ... Carrying on with defaults - no guarantees it'll work well")

        #attempt to estimate any missing parameters from the data itself        
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #calculate the name to use when we do batch analysis on this        
        from PYME.ParallelTasks.relativeFiles import getRelFilename
        self.seriesName = getRelFilename(filename)

        #try and find a previously performed analysis
        fns = filename.split(os.path.sep)
        cand = os.path.sep.join(fns[:-2] + ['analysis',] + fns[-2:]) + 'r'
        print(cand)
        if False:#os.path.exists(cand):
            h5Results = tables.open_file(cand)

            if 'FitResults' in dir(h5Results.root):
                self.fitResults = h5Results.root.FitResults[:]
                self.resultsSource = inpFilt.h5rSource(h5Results)

                self.resultsMdh = MetaData.TIRFDefault
                self.resultsMdh.copyEntriesFrom(MetaDataHandler.HDFMDHandler(h5Results))

        self.events = self.dataSource.getEvents()
Example #14
    def _loadQueue(self, filename):
        """Load data from a remote PYME.ParallelTasks.HDFTaskQueue queue using
        Pyro.
        
        Parameters
        ----------

        filename  : string
            the name of the queue         
        
        """
        import Pyro.core
        from PYME.IO.DataSources import TQDataSource
        from PYME.misc.computerName import GetComputerName
        compName = GetComputerName()

        if self.queueURI is None:
            #do a lookup
            taskQueueName = 'TaskQueues.%s' % compName

            try:
                from PYME.misc import pyme_zeroconf
                ns = pyme_zeroconf.getNS()
                URI = ns.resolve(taskQueueName)
            except Exception:  #fall back to a PYRONAME lookup if zeroconf fails
                URI = 'PYRONAME://' + taskQueueName

            self.tq = Pyro.core.getProxyForURI(URI)
        else:
            self.tq = Pyro.core.getProxyForURI(self.queueURI)

        self.seriesName = filename[len('QUEUE://'):]

        self.dataSource = TQDataSource.DataSource(self.seriesName, self.tq)
        self.data = self.dataSource  #this will get replaced with a wrapped version

        self.mdh = MetaDataHandler.QueueMDHandler(self.tq, self.seriesName)
        MetaData.fillInBlanks(self.mdh, self.dataSource)

        #self.timer.WantNotification.append(self.dsRefresh)

        self.events = self.dataSource.getEvents()
        self.mode = 'LM'
Example #15
def convertFile(pathToData,
                outFile,
                pixelsize=None,
                complib='zlib',
                complevel=9):

    #seriesName = pathToData.split(os.sep)[-2]

    #fnl = os.listdir(pathToData)
    #fnl2 = [pathToData + f for f in fnl]

    f1 = read_kdf.ReadKdfData(pathToData).squeeze()

    xSize, ySize = f1.shape[0:2]

    outF = tables.open_file(outFile, 'w')

    filt = tables.Filters(complevel, complib, shuffle=True)

    imageData = outF.create_earray(outF.root,
                                   'ImageData',
                                   tables.UInt16Atom(), (0, xSize, ySize),
                                   filters=filt,
                                   expectedrows=f1.shape[2])

    for i in range(f1.shape[2]):
        imageData.append(f1[:, :, i].reshape(1, xSize, ySize))

    hdh = MetaData.HDFMDHandler(outF, MetaData.PCODefault)

    if pixelsize is not None:
        hdh.setEntry('voxelsize.x', pixelsize)
        hdh.setEntry('voxelsize.y', pixelsize)

    outEvents = outF.create_table(outF.root,
                                  'Events',
                                  SpoolEvent,
                                  filters=tables.Filters(complevel=5,
                                                         shuffle=True))

    outF.flush()
    outF.close()
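
Usage is analogous to Example #12, except the frame geometry is read from the KDF file itself; the paths are hypothetical:

convertFile('series.kdf', 'converted.h5', pixelsize=0.07)  # hypothetical paths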
Example #16
interpModel_by_chan = [None, None, None, None]


def interpModel(chan=0):
    im = interpModel_by_chan[chan]
    if im is None and chan != 0:
        return interpModel_by_chan[0]
    else:
        return im


dx = None
dy = None
dz = None

mdh = MetaData.NestedClassMDHandler(MetaData.TIRFDefault)


def set_pixelsize_nm(pixelsize):
    mdh['voxelsize.x'] = 1e-3 * pixelsize
    mdh['voxelsize.y'] = 1e-3 * pixelsize
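
A quick sanity check of the conversion above: a pixel size given in nanometres is stored on the shared handler in micrometres.

set_pixelsize_nm(70.0)                         # 70 nm pixels
assert abs(mdh['voxelsize.x'] - 0.07) < 1e-9   # stored as 0.07 um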


def genTheoreticalModel(md, zernikes={}, **kwargs):
    from PYME.Analysis.PSFGen import fourierHNA
    global IntXVals, IntYVals, IntZVals, dx, dy, dz

    if True:  #not dx == md.voxelsize.x*1e3 or not dy == md.voxelsize.y*1e3 or not dz == md.voxelsize.z*1e3:

        vs = md.voxelsize_nm
        IntXVals = vs.x * np.mgrid[-150:150]
Example #17
    def __init__(self, dsviewer):
        self.dsviewer = dsviewer
        if 'tq' in dir(dsviewer):
            self.tq = dsviewer.tq
        else:
            self.tq = None

        self.image = dsviewer.image
        self.view = dsviewer.view
        self.do = dsviewer.do

        #this should only occur for file types which we weren't expecting to process
        #as LM data (e.g. tiffs)
        if 'EstimatedLaserOnFrameNo' not in self.image.mdh.getEntryNames():
            from PYME.Analysis import MetaData
            #try:
            MetaData.fillInBlanks(self.image.mdh, self.image.dataSource)
            #except IndexError:
            #    pass

        if 'fitResults' in dir(self.image):
            self.fitResults = self.image.fitResults
        else:
            self.fitResults = []

        if 'resultsMdh' in dir(self.image):
            self.resultsMdh = self.image.resultsMdh

        mTasks = wx.Menu()
        TASKS_STANDARD_2D = wx.NewId()
        TASKS_CALIBRATE_SPLITTER = wx.NewId()
        TASKS_2D_SPLITTER = wx.NewId()
        TASKS_3D = wx.NewId()
        TASKS_3D_SPLITTER = wx.NewId()
        TASKS_PRI = wx.NewId()
        mTasks.Append(TASKS_STANDARD_2D, "Normal 2D analysis", "",
                      wx.ITEM_NORMAL)
        mTasks.Append(TASKS_CALIBRATE_SPLITTER, "Calibrating the splitter", "",
                      wx.ITEM_NORMAL)
        mTasks.Append(TASKS_2D_SPLITTER, "2D with splitter", "",
                      wx.ITEM_NORMAL)
        mTasks.Append(TASKS_3D, "3D analysis", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_3D_SPLITTER, "3D with splitter", "",
                      wx.ITEM_NORMAL)
        mTasks.Append(TASKS_PRI, "PRI", "", wx.ITEM_NORMAL)
        self.dsviewer.menubar.Append(mTasks, "Set defaults for")

        wx.EVT_MENU(self.dsviewer, TASKS_CALIBRATE_SPLITTER,
                    self.OnCalibrateSplitter)
        wx.EVT_MENU(self.dsviewer, TASKS_STANDARD_2D, self.OnStandard2D)
        wx.EVT_MENU(self.dsviewer, TASKS_2D_SPLITTER, self.OnSpitter2D)
        wx.EVT_MENU(self.dsviewer, TASKS_3D, self.OnStandard3D)
        wx.EVT_MENU(self.dsviewer, TASKS_3D_SPLITTER, self.OnSpliter3D)
        wx.EVT_MENU(self.dsviewer, TASKS_PRI, self.OnPRI3D)

        BG_SUBTRACT = wx.NewId()
        self.dsviewer.view_menu.AppendCheckItem(BG_SUBTRACT,
                                                'Subtract Background')
        wx.EVT_MENU(self.dsviewer, BG_SUBTRACT, self.OnToggleBackground)

        #a timer object to update for us
        self.timer = mytimer()
        self.timer.Start(10000)

        self.analDispMode = 'z'

        self.numAnalysed = 0
        self.numEvents = 0

        dsviewer.pipeline = pipeline.Pipeline()
        self.ds = None

        dsviewer.paneHooks.append(self.GenPointFindingPanel)
        dsviewer.paneHooks.append(self.GenAnalysisPanel)
        dsviewer.paneHooks.append(self.GenFitStatusPanel)

        dsviewer.updateHooks.append(self.update)
        dsviewer.statusHooks.append(self.GetStatusText)

        if 'Protocol.DataStartsAt' in self.image.mdh.getEntryNames():
            self.do.zp = self.image.mdh.getEntry('Protocol.DataStartsAt')
        else:
            self.do.zp = self.image.mdh.getEntry('EstimatedLaserOnFrameNo')
Example #18
    def __init__(self, dsviewer):
        self.dsviewer = dsviewer
        if 'tq' in dir(dsviewer):
            self.tq = dsviewer.tq
        else:
            self.tq = None
        
        self.image = dsviewer.image
        self.view = dsviewer.view
        self.do = dsviewer.do

        #this should only occur for file types which we weren't expecting to process
        #as LM data (e.g. tiffs)
        if 'EstimatedLaserOnFrameNo' not in self.image.mdh.getEntryNames():
            from PYME.Analysis import MetaData
            #try:
            MetaData.fillInBlanks(self.image.mdh, self.image.dataSource)
            #except IndexError:
            #    pass
            

        if 'fitResults' in dir(self.image):
            self.fitResults = self.image.fitResults
        else:
            self.fitResults = []
        
        if 'resultsMdh' in dir(self.image):
            self.resultsMdh = self.image.resultsMdh

        mTasks = wx.Menu()
        TASKS_STANDARD_2D = wx.NewId()
        TASKS_CALIBRATE_SPLITTER = wx.NewId()
        TASKS_2D_SPLITTER = wx.NewId()
        TASKS_3D = wx.NewId()
        TASKS_3D_SPLITTER = wx.NewId()
        TASKS_PRI = wx.NewId()
        mTasks.Append(TASKS_STANDARD_2D, "Normal 2D analysis", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_CALIBRATE_SPLITTER, "Calibrating the splitter", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_2D_SPLITTER, "2D with splitter", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_3D, "3D analysis", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_3D_SPLITTER, "3D with splitter", "", wx.ITEM_NORMAL)
        mTasks.Append(TASKS_PRI, "PRI", "", wx.ITEM_NORMAL)
        self.dsviewer.menubar.Append(mTasks, "Set defaults for")
        
        wx.EVT_MENU(self.dsviewer, TASKS_CALIBRATE_SPLITTER, self.OnCalibrateSplitter)
        wx.EVT_MENU(self.dsviewer, TASKS_STANDARD_2D, self.OnStandard2D)
        wx.EVT_MENU(self.dsviewer, TASKS_2D_SPLITTER, self.OnSpitter2D)
        wx.EVT_MENU(self.dsviewer, TASKS_3D, self.OnStandard3D)
        wx.EVT_MENU(self.dsviewer, TASKS_3D_SPLITTER, self.OnSpliter3D)
        wx.EVT_MENU(self.dsviewer, TASKS_PRI, self.OnPRI3D)
        
        BG_SUBTRACT = wx.NewId()
        self.dsviewer.view_menu.AppendCheckItem(BG_SUBTRACT, 'Subtract Background')
        wx.EVT_MENU(self.dsviewer, BG_SUBTRACT, self.OnToggleBackground)
        

        #a timer object to update for us
        self.timer = mytimer()
        self.timer.Start(10000)

        self.analDispMode = 'z'

        self.numAnalysed = 0
        self.numEvents = 0
        
        dsviewer.pipeline = pipeline.Pipeline()
        self.ds = None

        dsviewer.paneHooks.append(self.GenPointFindingPanel)
        dsviewer.paneHooks.append(self.GenAnalysisPanel)
        dsviewer.paneHooks.append(self.GenFitStatusPanel)

        dsviewer.updateHooks.append(self.update)
        dsviewer.statusHooks.append(self.GetStatusText)

        if 'Protocol.DataStartsAt' in self.image.mdh.getEntryNames():
            self.do.zp = self.image.mdh.getEntry('Protocol.DataStartsAt')
        else:
            self.do.zp = self.image.mdh.getEntry('EstimatedLaserOnFrameNo')