    def execute(self, namespace):
        from PYME.IO.image import ImageBounds
        from PYME.IO import tabular
        inp = namespace[self.inputLocalizations]
        if not isinstance(inp, tabular.ColourFilter):
            cf = tabular.ColourFilter(inp, None)

            print('Created colour filter with chans: %s' % cf.getColourChans())
            cf.mdh = inp.mdh
        else:
            cf = inp

        #default to taking min and max localizations as image bounds
        imb = ImageBounds.estimateFromSource(inp)

        if self.zBoundsMode == 'min-max':
            self.zBounds[0], self.zBounds[1] = float(imb.z0), float(imb.z1)

        if self.xyBoundsMode == 'inherit' and getattr(inp, 'imageBounds', None) is not None:
            imb = inp.imageBounds
        elif self.xyBoundsMode == 'metadata':
            imb = ImageBounds.extractFromMetadata(inp.mdh)
        elif self.xyBoundsMode == 'manual':
            imb.x0, imb.y0, imb.x1, imb.y1 = self.manualXYBounds

        cf.imageBounds = imb

        renderer = renderers.RENDERERS[str(self.renderingModule)](None, cf)

        namespace[self.outputImage] = renderer.Generate(self.trait_get())
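
# A hedged usage sketch for the module above (the class name `RenderImage`,
# the trait values and the 'Gaussian' renderer key are illustrative
# assumptions, not confirmed API):
#
#   namespace = {'localizations': locs}        # locs: a tabular data source
#   mod = RenderImage(inputLocalizations='localizations',
#                     outputImage='rendered',
#                     renderingModule='Gaussian')
#   mod.execute(namespace)
#   img = namespace['rendered']                # the rendered image
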
    def OnGenEvents(self, event):
        from PYME.simulation import locify
        #from PYME.Acquire.Hardware.Simulator import wormlike2
        from PYME.IO import tabular, image
        from PYME.IO.image import ImageBounds
        import pylab

        #wc = wormlike2.wormlikeChain(100)

        pipeline = self.visFr.pipeline
        pipeline.filename = 'Simulation'

        pylab.figure()
        pylab.plot(self.xp, self.yp, 'x')  #, lw=2)
        if isinstance(self.source, WormlikeSource):
            pylab.plot(self.xp, self.yp, lw=2)

        res = locify.eventify(self.xp,
                              self.yp,
                              self.meanIntensity,
                              self.meanDuration,
                              self.backgroundIntensity,
                              self.meanEventNumber,
                              self.scaleFactor,
                              self.meanTime,
                              z=self.zp)

        pylab.plot(res['fitResults']['x0'], res['fitResults']['y0'], '+')

        ds = tabular.MappingFilter(tabular.FitResultsSource(res))

        if isinstance(self.source, ImageSource):
            pipeline.imageBounds = image.openImages[
                self.source.image].imgBounds
        else:
            pipeline.imageBounds = ImageBounds.estimateFromSource(ds)

        pipeline.addDataSource('Generated Points', ds)
        pipeline.selectDataSource('Generated Points')

        from PYME.IO.MetaDataHandler import NestedClassMDHandler
        pipeline.mdh = NestedClassMDHandler()
        pipeline.mdh['Camera.ElectronsPerCount'] = 1
        pipeline.mdh['Camera.TrueEMGain'] = 1
        pipeline.mdh['Camera.CycleTime'] = 1
        pipeline.mdh['voxelsize.x'] = .110

        # discard any default 'sig' filter if present
        pipeline.filterKeys.pop('sig', None)

        pipeline.Rebuild()
        if len(self.visFr.layers) < 1:
            #TODO - move this logic so that a layer is added automatically when a datasource is added?
            self.visFr.add_pointcloud_layer()
        #self.visFr.CreateFoldPanel()
        self.visFr.SetFit()
    def execute(self, namespace):
        from PYME.IO.image import ImageBounds, ImageStack
        from PYME.IO.MetaDataHandler import DictMDHandler, origin_nm
        from PYME.IO import tabular
        import numpy as np

        shell = namespace[self.input_shell]
        if isinstance(shell, tabular.TabularBase):
            from PYME.Analysis.points import spherical_harmonics
            shell = spherical_harmonics.ScaledShell.from_tabular(shell)
        image_bound_source = namespace[self.input_image_bound_source]
        # TODO - make bounds estimation more generic - e.g. to match an existing image.
        b = ImageBounds.estimateFromSource(image_bound_source)
        ox, oy, _ = origin_nm(image_bound_source.mdh)

        nx = np.ceil(
            (np.ceil(b.x1) - np.floor(b.x0)) / self.voxelsize_nm[0]) + 1
        ny = np.ceil(
            (np.ceil(b.y1) - np.floor(b.y0)) / self.voxelsize_nm[1]) + 1
        nz = np.ceil(
            (np.ceil(b.z1) - np.floor(b.z0)) / self.voxelsize_nm[2]) + 1

        x = np.arange(np.floor(b.x0), b.x0 + nx * self.voxelsize_nm[0],
                      self.voxelsize_nm[0])
        y = np.arange(np.floor(b.y0), b.y0 + ny * self.voxelsize_nm[1],
                      self.voxelsize_nm[1])
        z = np.arange(np.floor(b.z0), b.z0 + nz * self.voxelsize_nm[2],
                      self.voxelsize_nm[2])
        logger.debug('mask size %s' % ((len(x), len(y), len(z)), ))

        xx, yy, zz = np.meshgrid(x, y, z, indexing='ij')

        inside = shell.check_inside(xx.ravel(), yy.ravel(), zz.ravel())
        inside = np.reshape(inside, xx.shape)

        mdh = DictMDHandler({
            'voxelsize.x': self.voxelsize_nm[0] / 1e3,
            'voxelsize.y': self.voxelsize_nm[1] / 1e3,
            'voxelsize.z': self.voxelsize_nm[2] / 1e3,
            'ImageBounds.x0': x.min(),
            'ImageBounds.x1': x.max(),
            'ImageBounds.y0': y.min(),
            'ImageBounds.y1': y.max(),
            'ImageBounds.z0': z.min(),
            'ImageBounds.z1': z.max(),
            'Origin.x': ox + b.x0,
            'Origin.y': oy + b.y0,
            'Origin.z': b.z0
        })

        namespace[self.output] = ImageStack(data=inside,
                                            mdh=mdh,
                                            haveGUI=False)
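
# A minimal, self-contained sketch of the meshgrid/ravel/reshape masking
# pattern used above, with a simple sphere test standing in for
# shell.check_inside (all values illustrative):
#
#   import numpy as np
#   x = np.arange(0., 30., 10.)                      # 3 samples per axis
#   xx, yy, zz = np.meshgrid(x, x, x, indexing='ij')
#   inside = (xx.ravel()**2 + yy.ravel()**2 + zz.ravel()**2) < 20.**2
#   mask = inside.reshape(xx.shape)                  # boolean volume, shape (3, 3, 3)
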
    def OpenFile(self, filename='', ds=None, clobber_recipe=True, **kwargs):
        """Open a file - accepts optional keyword arguments for use with files
        saved as .txt and .mat. These are:
            
            FieldNames: a list of names for the fields in the text file or
                        matlab variable.
            VarName:    the name of the variable in the .mat file which 
                        contains the data.
            SkipRows:   Number of header rows to skip for txt file data
            
            PixelSize:  Pixel size if not in nm
            
        """

        #close any files we had open previously
        while len(self.filesToClose) > 0:
            self.filesToClose.pop().close()

        # clear our state
        # nb - equivalent to clearing recipe namespace
        self.dataSources.clear()

        if clobber_recipe:
            # clear any processing modules from the pipeline
            # call with clobber_recipe = False in a 'Open a new file with the processing pipeline I've set up' use case
            # TODO: Add an "File-->Open [preserving recipe]" menu option or similar
            self.recipe.modules = []

        if 'zm' in dir(self):
            del self.zm
        self.filter = None
        self.mapping = None
        self.colourFilter = None
        self.events = None
        self.mdh = MetaDataHandler.NestedClassMDHandler()

        self.filename = filename

        if ds is None:
            from PYME.IO import unifiedIO  # TODO - what is the launch time penalty here for importing clusterUI and finding a nameserver?

            # load from file(/cluster, downloading a copy of the file if needed)
            with unifiedIO.local_or_temp_filename(filename) as fn:
                # TODO - check that loading isn't lazy (i.e. we need to make a copy of data in memory whilst in the
                # context manager in order to be safe with unifiedIO and cluster data). From a quick look, it would seem
                # that _ds_from_file() copies the data, but potentially keeps the file open which could be problematic.
                # This won't affect local file loading even if loading is lazy (i.e. shouldn't cause a regression)
                ds = self._ds_from_file(fn, **kwargs)
                self.events = getattr(ds, 'events', None)
                self.mdh.copyEntriesFrom(ds.mdh)

        # skip the MappingFilter wrapping, etc. in self.addDataSource and add this datasource as-is
        self.dataSources['FitResults'] = ds

        # Fit module specific filter settings
        # TODO - put all the defaults here and use a local variable rather than in __init__ (self.filterKeys is largely an artifact of pre-recipe based pipeline)
        if 'Analysis.FitModule' in self.mdh.getEntryNames():
            fitModule = self.mdh['Analysis.FitModule']
            if 'Interp' in fitModule:
                self.filterKeys['A'] = (5, 100000)
            if fitModule == 'SplitterShiftEstFR':
                self.filterKeys['fitError_dx'] = (0, 10)
                self.filterKeys['fitError_dy'] = (0, 10)

        if clobber_recipe:
            from PYME.recipes.localisations import ProcessColour, Pipelineify
            from PYME.recipes.tablefilters import FilterTable

            add_pipeline_variables = Pipelineify(
                self.recipe,
                inputFitResults='FitResults',
                pixelSizeNM=kwargs.get('PixelSize', 1.),
                outputLocalizations='Localizations')
            self.recipe.add_module(add_pipeline_variables)

            #self._get_dye_ratios_from_metadata()

            colour_mapper = ProcessColour(self.recipe,
                                          input='Localizations',
                                          output='colour_mapped')
            self.recipe.add_module(colour_mapper)
            self.recipe.add_module(
                FilterTable(self.recipe,
                            inputName='colour_mapped',
                            outputName='filtered_localizations',
                            filters={
                                k: list(v)
                                for k, v in self.filterKeys.items()
                                if k in ds.keys()
                            }))
        else:
            logger.warning(
                'Opening file without clobbering recipe; filter and ratiometric colour settings might not be handled properly'
            )
            # FIXME - should we update filter keys and/or make the filter more robust
            # FIXME - do we need to do anything about colour settings?

        self.recipe.execute()
        self.filterKeys = {}
        if 'filtered_localizations' in self.dataSources.keys():
            self.selectDataSource(
                'filtered_localizations')  #NB - this rebuilds the pipeline
        else:
            # TODO - replace / remove this fallback with something better. This is currently required
            # when we use/abuse the pipeline in dh5view, but that should ideally be replaced with
            # something cleaner. This (and case above) should probably also be conditional on `clobber_recipe`
            # as if opening with an existing recipe we would likely want to keep selectedDataSource constant as well.
            self.selectDataSource('FitResults')

        # FIXME - we do this already in pipelinify, maybe we can avoid doubling up?
        self.ev_mappings, self.eventCharts = _processEvents(
            ds, self.events, self.mdh)  # extract information from any events
        # Retrieve or estimate image bounds
        if False:  # 'imgBounds' in kwargs.keys():
            # TODO - why is this disabled? Current usage would appear to be when opening from LMAnalysis
            # during real-time localization, to force image bounds to match raw data, but also potentially useful
            # for other scenarios where metadata is not fully present.
            self.imageBounds = kwargs['imgBounds']
        elif ('scanx' not in self.selectedDataSource.keys()
              or 'scany' not in self.selectedDataSource.keys()
              ) and 'Camera.ROIWidth' in self.mdh.getEntryNames():
            self.imageBounds = ImageBounds.extractFromMetadata(self.mdh)
        else:
            self.imageBounds = ImageBounds.estimateFromSource(
                self.selectedDataSource)
    def OpenFile(self, filename='', ds=None, **kwargs):
        """Open a file - accepts optional keyword arguments for use with files
        saved as .txt and .mat. These are:
            
            FieldNames: a list of names for the fields in the text file or
                        matlab variable.
            VarName:    the name of the variable in the .mat file which 
                        contains the data.
            SkipRows:   Number of header rows to skip for txt file data
            
            PixelSize:  Pixel size if not in nm
            
        """
        
        #close any files we had open previously
        while len(self.filesToClose) > 0:
            self.filesToClose.pop().close()
        
        #clear our state
        self.dataSources.clear()
        if 'zm' in dir(self):
            del self.zm
        self.filter = None
        self.mapping = None
        self.colourFilter = None
        self.events = None
        self.mdh = MetaDataHandler.NestedClassMDHandler()
        
        self.filename = filename
        
        if ds is None:
            #load from file
            ds = self._ds_from_file(filename, **kwargs)

            
        #wrap the data source with a mapping so we can fiddle with things
        #e.g. combining z position and focus 
        mapped_ds = tabular.MappingFilter(ds)

        
        if 'PixelSize' in kwargs.keys():
            mapped_ds.addVariable('pixelSize', kwargs['PixelSize'])
            mapped_ds.setMapping('x', 'x*pixelSize')
            mapped_ds.setMapping('y', 'y*pixelSize')

        #extract information from any events
        self.ev_mappings, self.eventCharts = _processEvents(mapped_ds, self.events, self.mdh)



        #Fit module specific filter settings        
        if 'Analysis.FitModule' in self.mdh.getEntryNames():
            fitModule = self.mdh['Analysis.FitModule']
            
            #print 'fitModule = %s' % fitModule
            
            if 'Interp' in fitModule:
                self.filterKeys['A'] = (5, 100000)
            
            if 'LatGaussFitFR' in fitModule:
                mapped_ds.addColumn('nPhotons', getPhotonNums(mapped_ds, self.mdh))

            if 'SplitterFitFNR' in fitModule:
                mapped_ds.addColumn('nPhotonsg', getPhotonNums({'A': mapped_ds['fitResults_Ag'], 'sig': mapped_ds['fitResults_sigma']}, self.mdh))
                mapped_ds.addColumn('nPhotonsr', getPhotonNums({'A': mapped_ds['fitResults_Ar'], 'sig': mapped_ds['fitResults_sigma']}, self.mdh))
                mapped_ds.setMapping('nPhotons', 'nPhotonsg+nPhotonsr')

            if fitModule == 'SplitterShiftEstFR':
                self.filterKeys['fitError_dx'] = (0,10)
                self.filterKeys['fitError_dy'] = (0,10)

        #self._get_dye_ratios_from_metadata()

        self.addDataSource('Localizations', mapped_ds)

        # Retrieve or estimate image bounds
        if False:  # 'imgBounds' in kwargs.keys():
            self.imageBounds = kwargs['imgBounds']
        elif ('scanx' not in mapped_ds.keys() and 'scany' not in mapped_ds.keys()) \
                and 'Camera.ROIWidth' in self.mdh.getEntryNames():
            self.imageBounds = ImageBounds.extractFromMetadata(self.mdh)
        else:
            self.imageBounds = ImageBounds.estimateFromSource(mapped_ds)

        from PYME.recipes.localisations import ProcessColour
        from PYME.recipes.tablefilters import FilterTable
        
        self.colour_mapper = ProcessColour(self.recipe, input='Localizations', output='colour_mapped')
        #we keep a copy of this so that the colour panel can find it.
        self.recipe.add_module(self.colour_mapper)
        self.recipe.add_module(FilterTable(self.recipe, inputName='colour_mapped', outputName='filtered_localizations', filters={k:list(v) for k, v in self.filterKeys.items() if k in mapped_ds.keys()}))
        self.recipe.execute()
        self.filterKeys = {}
        self.selectDataSource('filtered_localizations') #NB - this rebuilds the pipeline
    def OnGenEvents(self, event):
        from PYME.simulation import locify
        #from PYME.Acquire.Hardware.Simulator import wormlike2
        from PYME.IO import tabular
        from PYME.IO.image import ImageBounds
        # import pylab
        import matplotlib.pyplot as plt

        #wc = wormlike2.wormlikeChain(100)

        pipeline = self.visFr.pipeline
        pipeline.filename = 'Simulation'

        plt.figure()
        plt.plot(self.xp, self.yp, 'x')  #, lw=2)
        if isinstance(self.source, WormlikeSource):
            plt.plot(self.xp, self.yp, lw=2)

        if self.mode == 'STORM':
            res = locify.eventify(self.xp,
                                  self.yp,
                                  self.meanIntensity,
                                  self.meanDuration,
                                  self.backgroundIntensity,
                                  self.meanEventNumber,
                                  self.scaleFactor,
                                  self.meanTime,
                                  z=self.zp)
        else:
            res = locify.eventify2(self.xp,
                                   self.yp,
                                   self.meanIntensity,
                                   self.meanDuration,
                                   self.backgroundIntensity,
                                   self.meanEventNumber,
                                   self.scaleFactor,
                                   self.meanTime,
                                   z=self.zp)

        plt.plot(res['fitResults']['x0'], res['fitResults']['y0'], '+')

        ds = tabular.MappingFilter(tabular.FitResultsSource(res))

        try:
            # some data sources (currently ImageSource) have image bounds info; use this if available.
            # This could fail with an AttributeError (if the data source doesn't implement bounds)
            # or with another error if something fails inside get_bounds(). Only catch the
            # AttributeError, as we should not be handling other errors here.
            pipeline.imageBounds = self.source.get_bounds()
        except AttributeError:
            pipeline.imageBounds = ImageBounds.estimateFromSource(ds)

        pipeline.addDataSource('Generated Points', ds)
        pipeline.selectDataSource('Generated Points')

        from PYME.IO.MetaDataHandler import NestedClassMDHandler
        pipeline.mdh = NestedClassMDHandler()
        pipeline.mdh['Camera.ElectronsPerCount'] = 1
        pipeline.mdh['Camera.TrueEMGain'] = 1
        pipeline.mdh['Camera.CycleTime'] = 1
        pipeline.mdh['voxelsize.x'] = .110
        # some info about the parameters
        pipeline.mdh['GeneratedPoints.MeanIntensity'] = self.meanIntensity
        pipeline.mdh['GeneratedPoints.MeanDuration'] = self.meanDuration
        pipeline.mdh['GeneratedPoints.MeanEventNumber'] = self.meanEventNumber
        pipeline.mdh['GeneratedPoints.BackgroundIntensity'] = self.backgroundIntensity
        pipeline.mdh['GeneratedPoints.ScaleFactor'] = self.scaleFactor
        pipeline.mdh['GeneratedPoints.MeanTime'] = self.meanTime
        pipeline.mdh['GeneratedPoints.Mode'] = self.mode
        # the source info
        self.source.genMetaData(pipeline.mdh)

        # discard any default 'sig' filter if present
        pipeline.filterKeys.pop('sig', None)

        pipeline.Rebuild()
        if len(self.visFr.layers) < 1:
            #TODO - move this logic so that a layer is added automatically when a datasource is added?
            self.visFr.add_pointcloud_layer()
        #self.visFr.CreateFoldPanel()
        self.visFr.SetFit()