Example No. 1
    def _loadBioformats(self, filename):
        #from PYME.IO.FileUtils import readTiff
        from PYME.IO.DataSources import BioformatsDataSource

        try:
            import bioformats
        except ImportError:
            logger.exception(
                'Error importing bioformats - is the python-bioformats module installed?'
            )
            raise

        #mdfn = self.FindAndParseMetadata(filename)
        print("Bioformats:loading data")
        self.dataSource = BioformatsDataSource.DataSource(filename, None)
        self.mdh = MetaDataHandler.NestedClassMDHandler(MetaData.BareBones)

        print("Bioformats:loading metadata")
        OMEXML = bioformats.get_omexml_metadata(filename).encode('utf8')
        print("Bioformats:parsing metadata")
        OMEmd = MetaDataHandler.OMEXMLMDHandler(OMEXML)
        self.mdh.copyEntriesFrom(OMEmd)
        print("Bioformats:done")

        print(self.dataSource.shape)
        self.dataSource = BufferedDataSource.DataSource(
            self.dataSource, min(self.dataSource.getNumSlices(), 50))
        self.data = self.dataSource  #this will get replaced with a wrapped version

        print(self.data.shape)

        #from PYME.ParallelTasks.relativeFiles import getRelFilename
        self.seriesName = getRelFilename(filename)

        self.mode = 'default'
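
Note: python-bioformats drives the Java Bio-Formats library through javabridge, so a JVM normally has to be running before the loader above can read anything. A minimal sketch of that surrounding setup, assuming the standard javabridge/bioformats API rather than anything shown in this snippet:

import javabridge
import bioformats

# start the JVM once per process before touching Bio-Formats
javabridge.start_vm(class_path=bioformats.JARS, run_headless=True)
try:
    # 'example.tif' is a hypothetical filename
    omexml = bioformats.get_omexml_metadata('example.tif')
finally:
    javabridge.kill_vm()  # the JVM cannot be restarted in the same process
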
Example No. 2
    def start(self):
        self._gen_weights()
        self.genCoords()

        #metadata handling
        self.mdh = MetaDataHandler.NestedClassMDHandler()
        self.mdh.setEntry('StartTime', time.time())
        self.mdh.setEntry('AcquisitionType', 'Tiled overview')

        #loop over all providers of metadata
        for mdgen in MetaDataHandler.provideStartMetadata:
            mdgen(self.mdh)

        self._x0 = self.xp[0]
        self._y0 = self.yp[0]

        self._pixel_size = self.mdh.getEntry('voxelsize.x')
        self.background = self.mdh.getOrDefault('Camera.ADOffset',
                                                self.background)

        # make our x0, y0 independent of the camera ROI setting
        x0_cam, y0_cam = MetaDataHandler.get_camera_physical_roi_origin(
            self.mdh)

        x0 = self._x0 + self._pixel_size * x0_cam
        y0 = self._y0 + self._pixel_size * y0_cam

        self.P = tile_pyramid.ImagePyramid(self._tiledir,
                                           self._base_tile_size,
                                           x0=x0,
                                           y0=y0,
                                           pixel_size=self._pixel_size)

        pointScanner.PointScanner.start(self)
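
The ROI-origin correction above is plain arithmetic: the recorded stage position is offset by the camera ROI origin converted from pixels into the same units as the pixel size (micrometres in these examples). A small worked sketch with hypothetical numbers:

pixel_size = 0.1                     # um per pixel, i.e. mdh['voxelsize.x']
x0_cam, y0_cam = 128, 64             # ROI origin on the sensor, in pixels
x0_stage, y0_stage = 123.4, 567.8    # first scan position, in um
x0 = x0_stage + pixel_size * x0_cam  # 136.2 um, independent of the ROI setting
y0 = y0_stage + pixel_size * y0_cam  # 574.2 um
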
Example No. 3
    def _loadh5(self, filename):
        """Load PYMEs semi-custom HDF5 image data format. Offloads all the
        hard work to the HDFDataSource class"""
        import tables
        from PYME.IO.DataSources import HDFDataSource, BGSDataSource
        from PYME.IO import tabular

        self.dataSource = HDFDataSource.DataSource(filename, None)
        #chain on a background subtraction data source, so we can easily do
        #background subtraction in the GUI the same way as in the analysis
        self.data = BGSDataSource.DataSource(
            self.dataSource)  #this will get replaced with a wrapped version

        if 'MetaData' in self.dataSource.h5File.root:  #should be true the whole time
            self.mdh = MetaData.TIRFDefault
            self.mdh.copyEntriesFrom(
                MetaDataHandler.HDFMDHandler(self.dataSource.h5File))
        else:
            self.mdh = MetaData.TIRFDefault
            import wx
            wx.MessageBox(
                "Carrying on with defaults - no gaurantees it'll work well",
                'ERROR: No metadata found in file ...', wx.OK)
            print(
                "ERROR: No metadata fond in file ... Carrying on with defaults - no gaurantees it'll work well"
            )

        #attempt to estimate any missing parameters from the data itself
        try:
            MetaData.fillInBlanks(self.mdh, self.dataSource)
        except:
            logger.exception('Error attempting to populate missing metadata')

        #calculate the name to use when we do batch analysis on this
        #from PYME.IO.FileUtils.nameUtils import getRelFilename
        self.seriesName = getRelFilename(filename)

        #try and find a previously performed analysis
        fns = filename.split(os.path.sep)
        cand = os.path.sep.join(fns[:-2] + [
            'analysis',
        ] + fns[-2:]) + 'r'
        print(cand)
        if False:  #os.path.exists(cand):
            h5Results = tables.open_file(cand)

            if 'FitResults' in dir(h5Results.root):
                self.fitResults = h5Results.root.FitResults[:]
                self.resultsSource = tabular.H5RSource(h5Results)

                self.resultsMdh = MetaData.TIRFDefault
                self.resultsMdh.copyEntriesFrom(
                    MetaDataHandler.HDFMDHandler(h5Results))

        self.events = self.dataSource.getEvents()

        self.mode = 'LM'
Example No. 4
    def execute(self, namespace):
        self._namespace = namespace
        import multiprocessing
#        from PYME.util import mProfile        
#        mProfile.profileOn(["frc.py"])
        
        if self.multiprocessing:
            process_count = np.clip(2, 1, multiprocessing.cpu_count() - 1)
            self._pool = multiprocessing.Pool(processes=process_count)
        
        pipeline = namespace[self.inputName]
        mapped_pipeline = tabular.mappingFilter(pipeline)
        self._pixel_size_in_nm = self.pixel_size_in_nm * np.ones(3, dtype=float)
        
        image_pair = self.generate_image_pair(mapped_pipeline)
        
        image_pair = self.preprocess_images(image_pair)
            
        # Should use DensityMapping recipe eventually when it is ready.
        mdh = MetaDataHandler.NestedClassMDHandler()
        mdh['Rendering.Method'] = "np.histogramdd"
        if 'imageID' in pipeline.mdh.getEntryNames():
            mdh['Rendering.SourceImageID'] = pipeline.mdh['imageID']
        try:
            mdh['Rendering.SourceFilename'] = pipeline.resultsSource.h5f.filename
        except:
            pass        
        mdh.Source = MetaDataHandler.NestedClassMDHandler(pipeline.mdh)        
        mdh['Rendering.NEventsRendered'] = [image_pair[0].sum(), image_pair[1].sum()]
        mdh['voxelsize.units'] = 'um'
        mdh['voxelsize.x'] = self.pixel_size_in_nm * 1E-3
        mdh['voxelsize.y'] = self.pixel_size_in_nm * 1E-3
        
        ims = ImageStack(data=np.stack(image_pair, axis=-1), mdh=mdh)
        namespace[self.output_images] = ims
        
#        if self.plot_graphs:
#            from PYME.DSView.dsviewer import ViewIm3D
#            ViewIm3D(ims)
        
        frc_res, rawdata = self.calculate_FRC_from_images(image_pair, pipeline.mdh)
        
#        smoothed_frc = self.SmoothFRC(frc_freq, frc_corr)
#        
#        self.CalculateThreshold(frc_freq, frc_corr, smoothed_frc)
        
        namespace[self.output_frc_dict] = frc_res
        namespace[self.output_frc_raw] = rawdata
        
        if self.multiprocessing:
            self._pool.close()
            self._pool.join()
        
#        mProfile.profileOff()
#        mProfile.report()
        
        self.save_to_file(namespace)
Example No. 5
    def mdh(self):
        if self._mdh is None:
            try:
                self._mdh = MetaDataHandler.HDFMDHandler(self._h5file)
                if self.mode == 'r':
                    self._mdh = MetaDataHandler.NestedClassMDHandler(self._mdh)
            except IOError:
                # our file was opened in read mode and didn't have any metadata to start with
                self._mdh = MetaDataHandler.NestedClassMDHandler()

        return self._mdh
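
Most of these examples lean on the same small NestedClassMDHandler surface. A minimal usage sketch, restricted to calls that actually appear in the snippets on this page:

from PYME.IO import MetaDataHandler

mdh = MetaDataHandler.NestedClassMDHandler()
mdh.setEntry('StartTime', 0.0)                     # dotted keys address nested entries
mdh['Camera.ADOffset'] = 100                       # dict-style access works as well
offset = mdh.getOrDefault('Camera.ADOffset', 0)    # fall back to a default if missing
copy = MetaDataHandler.NestedClassMDHandler(mdh)   # copy-construct from another handler
copy.copyEntriesFrom(mdh)                          # or merge entries from any handler
print(copy.getEntryNames())
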
Example No. 6
def list_h5(filename):
    import tables
    from PYME.IO import MetaDataHandler
    from PYME.IO import tabular
    from PYME.IO import unifiedIO
    import json

    with unifiedIO.local_or_temp_filename(filename) as fn:
        with tables.open_file(fn, mode='r') as h5f:
            #make sure our hdf file gets closed

            try:
                mdh = MetaDataHandler.NestedClassMDHandler(
                    MetaDataHandler.HDFMDHandler(h5f))
                print('Metadata:\n____________')
                print(repr(mdh))
            except tables.FileModeError:  # Occurs if no metadata is found, since we opened the table in read-mode
                logger.warning(
                    'No metadata found, proceeding with empty metadata')
                mdh = MetaDataHandler.NestedClassMDHandler()

            print('\n\n')

            for t in h5f.list_nodes('/'):
                # FIXME - The following isinstance tests are not very safe (and badly broken in some cases e.g.
                # PZF formatted image data, Image data which is not in an EArray, etc ...)
                # Note that EArray is only used for streaming data!
                # They should ideally be replaced with more comprehensive tests (potentially based on array or dataset
                # dimensionality and/or data type) - i.e. duck typing. Our strategy for images in HDF should probably
                # also be improved / clarified - can we use hdf attributes to hint at the data intent? How do we support
                # > 3D data?

                if not isinstance(t, tables.Group):
                    print(t.name)
                    print('______________')

                    if isinstance(t, tables.VLArray):
                        data = h5f.get_node(h5f.root, t.name)
                        print('Ragged (VLArray) with %d rows' % len(data))
                        print('Row 0: %s' % data[0])

                    elif isinstance(t, tables.table.Table):
                        #  pipe our table into h5r or hdf source depending on the extension
                        data = h5f.get_node(h5f.root, t.name)

                        print('Table with %d rows\n dtype = %s' %
                              (len(data), data[0].dtype))

                    elif isinstance(t, tables.EArray):
                        data = h5f.get_node(h5f.root, t.name)

                        print('Image, shape = %s' % data.shape)

                    print('\n\n')
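
list_h5 is a module-level helper, so it can be called directly on a file path; a hypothetical call:

list_h5('results/experiment_0.h5r')  # hypothetical path; prints the metadata block and a one-line summary per node
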
Example No. 7
    def Generate(self, settings):
        mdh = MetaDataHandler.NestedClassMDHandler()
        copy_sample_metadata(self.pipeline.mdh, mdh)
        mdh['Rendering.Method'] = self.name
        if 'imageID' in self.pipeline.mdh.getEntryNames():
            mdh['Rendering.SourceImageID'] = self.pipeline.mdh['imageID']
        mdh['Rendering.SourceFilename'] = getattr(self.pipeline, 'filename',
                                                  '')
        mdh['Rendering.NEventsRendered'] = len(
            self.pipeline[self.pipeline.keys(
            )[0]])  # in future good to use colourfilter for per channel info?
        mdh.Source = MetaDataHandler.NestedClassMDHandler(self.pipeline.mdh)

        for cb in renderMetadataProviders:
            cb(mdh)

        pixelSize = settings['pixelSize']
        sliceThickness = settings['zSliceThickness']

        status = statusLog.StatusLogger('Generating %s Image ...' % self.name)

        # get image bounds at integer multiple of pixel size
        imb = self._get_image_bounds(pixelSize, sliceThickness,
                                     *settings.get('zBounds', [None, None]))

        #record the pixel origin in nm from the corner of the camera for future overlays
        ox, oy, oz = MetaDataHandler.origin_nm(mdh)
        if not imb.z0 == 0:
            # single plane in z stack
            # FIXME - what is z for 3D fitting at a single focal plane? Check for pipeline['focus']==0 instead?
            oz = 0

        mdh['Origin.x'] = ox + imb.x0
        mdh['Origin.y'] = oy + imb.y0
        mdh['Origin.z'] = oz + imb.z0

        colours = settings['colours']
        oldC = self.colourFilter.currentColour

        ims = []

        for c in colours:
            self.colourFilter.setColour(c)
            ims.append(np.atleast_3d(self.genIm(settings, imb, mdh)))

        self.colourFilter.setColour(oldC)

        return GeneratedImage(ims,
                              imb,
                              pixelSize,
                              sliceThickness,
                              colours,
                              mdh=mdh)
Example No. 8
def testFrames(detThresh = 0.9, offset = 0):
    plt.close('all')
    matplotlib.interactive(False)
    plt.clf()
    sq = min(mdh.getEntry('EstimatedLaserOnFrameNo') + 1000, dataSource.getNumSlices()//4)
    zps = np.array(list(range(mdh.getEntry('EstimatedLaserOnFrameNo') + 20, mdh.getEntry('EstimatedLaserOnFrameNo') + 24)) + list(range(sq, sq + 4)) + list(range(dataSource.getNumSlices()//2, dataSource.getNumSlices()//2 + 4)))
    zps += offset
    fitMod = cFitType.GetStringSelection()
    #bgFrames = int(tBackgroundFrames.GetValue())
    bgFrames = [int(v) for v in tBackgroundFrames.GetValue().split(':')]
    for i in range(12):
        #if 'Analysis.NumBGFrames' in md.getEntryNames():
        #bgi = range(max(zps[i] - bgFrames,mdh.getEntry('EstimatedLaserOnFrameNo')), zps[i])
        bgi = range(max(zps[i] + bgFrames[0],mdh.getEntry('EstimatedLaserOnFrameNo')), max(zps[i] + bgFrames[1],mdh.getEntry('EstimatedLaserOnFrameNo')))
        #else:
        #    bgi = range(max(zps[i] - 10,md.EstimatedLaserOnFrameNo), zps[i])
        if 'Splitter' in fitMod:
            ft = remFitBuf.fitTask(seriesName, zps[i], detThresh, MetaDataHandler.NestedClassMDHandler(mdh), 'SplitterObjFindR', bgindices=bgi, SNThreshold=True)
        else:
            ft = remFitBuf.fitTask(seriesName, zps[i], detThresh, MetaDataHandler.NestedClassMDHandler(mdh), 'LatObjFindFR', bgindices=bgi, SNThreshold=True)
        res = ft()
        xp = np.floor(i/4)/3.
        yp = (3 - i%4)/4.
        #print xp, yp
        plt.axes((xp,yp, 1./6,1./4.5))
        #d = ds[zps[i], :,:].squeeze().T
        d = dataSource.getSlice(zps[i]).T
        plt.imshow(d, cmap=cm.hot, interpolation='nearest', clim=(np.median(d.ravel()), d.max()))
        plt.title('Frame %d' % zps[i])
        plt.xlim(0, d.shape[1])
        plt.ylim(0, d.shape[0])
        plt.xticks([])
        plt.yticks([])
        #print 'i = %d, ft.index = %d' % (i, ft.index)
        #subplot(4,6,2*i+13)
        xp += 1./6
        plt.axes((xp,yp, 1./6,1./4.5))
        d = ft.ofd.filteredData.T
        #d = ft.data.squeeze().T
        plt.imshow(d, cmap=cm.hot, interpolation='nearest', clim=(np.median(d.ravel()), d.max()))
        plt.plot([p.x for p in ft.ofd], [p.y for p in ft.ofd], 'o', mew=2, mec='g', mfc='none', ms=9)
        if ft.driftEst:
             plt.plot([p.x for p in ft.ofdDr], [p.y for p in ft.ofdDr], 'o', mew=2, mec='b', mfc='none', ms=9)
        if ft.fitModule in remFitBuf.splitterFitModules:
                plt.plot([p.x for p in ft.ofd], [d.shape[0] - p.y for p in ft.ofd], 'o', mew=2, mec='g', mfc='none', ms=9)
        #axis('tight')
        plt.xlim(0, d.shape[1])
        plt.ylim(0, d.shape[0])
        plt.xticks([])
        plt.yticks([])
    plt.show()
    matplotlib.interactive(True)
Example No. 9
def fitallA(r2, mdh, zCoeffs, ns=1.51, axialShift=200.):
    mdh = MetaDataHandler.NestedClassMDHandler(mdh)
    mdh['Analysis.AxialShift'] = axialShift
    voxelsize = mdh.voxelsize_nm
    voxelsize.z = 50.
    zs = 50. * np.arange(-30., 31)
    p1 = fourierHNA.GenZernikeDPSF(zs, 70, zCoeffs, ns=ns)
    interpolator.setModel('foo', p1, voxelsize)

    estimator.splines.clear()
    estimator.calibrate(interpolator, mdh)

    sp_all = [
        startEstROI(r2[j], mdh, interpolator, estimator,
                    mdh['Analysis.ColourRatio'], mdh['Analysis.AxialShift'])
        for j in range(len(r2))
    ]

    fr = np.array([(fitROI(r2[j], mdh, interpolator, sp_all[j],
                           mdh['Analysis.ColourRatio'],
                           mdh['Analysis.AxialShift'])[0])
                   for j in range(len(r2))])

    #return mf[mf < (median(mf)+ 2*std(mf))].mean()
    return fr
Example No. 10
    def to_hdf(self, filename, tablename='Data', keys=None, metadata=None):
        from PYME.IO import h5rFile, MetaDataHandler
        # NOTE that we ignore metadata input
        metadata = MetaDataHandler.NestedClassMDHandler()
        metadata['spherical_harmonic_shell.standard_deviations'] = self.standard_deviations.tolist()
        metadata['spherical_harmonic_shell.scaling_factors'] = self.scaling_factors.tolist()
        metadata['spherical_harmonic_shell.principal_axes'] = self.principal_axes.tolist()
        metadata['spherical_harmonic_shell.summed_residuals'] = self._summed_residuals
        metadata['spherical_harmonic_shell.n_points_used_in_fitting'] = len(self.x)
        metadata['spherical_harmonic_shell.x0'] = self.x0
        metadata['spherical_harmonic_shell.y0'] = self.y0
        metadata['spherical_harmonic_shell.z0'] = self.z0
        metadata['spherical_harmonic_shell.sampling_fraction'] = self.sampling_fraction

        with h5rFile.H5RFile(filename, 'a') as f:
            f.appendToTable(tablename, self.to_recarray(keys))
            f.updateMetadata(metadata)
Example No. 11
def generateThumbnail(inputFile, thumbSize):
    global size
    #logging.debug('Input File: %s\n' % inputFile)
    #logging.debug('Output File: %s\n' % outputFile)
    #logging.debug('Thumb Size: %s\n' % thumbSize)

    h5f = tables.open_file(inputFile)

    dataSource = HDFDataSource.DataSource(inputFile, None)

    md = MetaData.genMetaDataFromSourceAndMDH(
        dataSource, MetaDataHandler.HDFMDHandler(h5f))

    xsize = h5f.root.ImageData.shape[1]
    ysize = h5f.root.ImageData.shape[2]

    if xsize > ysize:
        zoom = float(thumbSize) / xsize
    else:
        zoom = float(thumbSize) / ysize

    size = (int(xsize * zoom), int(ysize * zoom))

    im = h5f.root.ImageData[min(md.EstimatedLaserOnFrameNo + 10,
                                (h5f.root.ImageData.shape[0] -
                                 1)), :, :].astype('f')

    im = im.T - min(md.Camera.ADOffset, im.min())

    h5f.close()

    im = maximum(minimum(1 * (255 * im) / im.max(), 255), 0)

    return im.astype('uint8')
Example No. 12
    def execute(self, namespace):
        from PYME.IO.FileUtils import readSpeckle
        from PYME.IO import MetaDataHandler
        import os

        fileInfo = {'SEP': os.sep}

        seriesLength = 100000

        mdh = MetaDataHandler.NestedClassMDHandler()
        mdh['voxelsize.x'] = .001  # default pixel size - FIXME
        mdh['voxelsize.y'] = .001

        #use a default sensor size of 512
        #this gets over-ridden below if we supply an image
        clip_region = [
            self.edgeRejectionPixels, self.edgeRejectionPixels,
            512 - self.edgeRejectionPixels, 512 - self.edgeRejectionPixels
        ]

        if not self.inputImage == '':
            inp = namespace[self.inputImage]
            mdh.update(inp.mdh)
            seriesLength = inp.data.shape[2]

            clip_region = [
                self.edgeRejectionPixels, self.edgeRejectionPixels,
                inp.data.shape[0] - self.edgeRejectionPixels,
                inp.data.shape[1] - self.edgeRejectionPixels
            ]

            try:
                fileInfo['DIRNAME'], fileInfo['IMAGENAME'] = os.path.split(
                    inp.filename)
                fileInfo['IMAGESTUB'] = fileInfo['IMAGENAME'].split('MM')[0]
            except:
                pass

        speckleFN = self.speckleFilename.format(**fileInfo)

        specks = readSpeckle.readSpeckles(speckleFN)
        traces = readSpeckle.gen_traces_from_speckles(
            specks,
            leadFrames=self.leadFrames,
            followFrames=self.followFrames,
            seriesLength=seriesLength,
            clipRegion=clip_region)

        #turn this into an inputFilter object
        inp = tabular.RecArraySource(traces)

        #create a mapping to convert the coordinates in pixels to coordinates in nm
        vs = mdh.voxelsize_nm
        map = tabular.MappingFilter(inp,
                                    x='x_pixels*%3.2f' % vs.x,
                                    y='y_pixels*%3.2f' % vs.y)

        map.mdh = mdh

        namespace[self.outputName] = map
Example No. 13
    def __init__(self):
        #list of tuples of form (class, chan, name) describing the installed piezo channels
        self.piezos = []
        self.lasers = []
        self.hardwareChecks = []

        #entries should be of the form: "x" : (piezo, channel, multiplier)
        # where multiplier is what to multiply by to get the units to micrometers
        self.positioning = {}
        self.joystick = None

        self.cam = None
        self.cameras = {}
        self.camControls = {}

        self.stackNum = 0

        #self.WantEventNotification = []

        self.StatusCallbacks = []  #list of functions which provide status information
        self.CleanupFunctions = []  #list of functions to be called at exit
        self.PACallbacks = []  #list of functions to be called when a new aquisator is created

        self.saturationThreshold = 16383  #14 bit
        self.lastFrameSaturated = False
        #self.cam.saturationIntervened = False

        self.microscope_name = None

        self.saturatedMessage = ''

        protocol.scope = self
        ccdCalibrator.setScope(self)
        self.initDone = False

        self._OpenSettingsDB()

        self.spoolController = SpoolController(self)  #, defDir, **kwargs)

        self.state = StateManager(self)

        self.state.registerHandler('ActiveCamera', self.GetActiveCameraName,
                                   self._SetCamera, True)
        self.state.registerHandler('Camera.IntegrationTime',
                                   self._GetActiveCameraIntegrationTime,
                                   self._SetActiveCameraIntegrationTime, True)
        self.state.registerHandler('Camera.ROI', self._GetActiveCameraROI,
                                   self._SetActiveCameraROI, True)
        self.state.registerHandler('Camera.Binning',
                                   self._GetActiveCameraBinning,
                                   self._SetActiveCameraBinning, True)

        self.actions = ActionManager(self)

        MetaDataHandler.provideStartMetadata.append(self.GenStartMetadata)

        #provision to set global metadata values in startup script
        self.mdh = MetaDataHandler.NestedClassMDHandler()
Example No. 14
    def OnDeconvWiener(self, event):
        #from PYME.Deconv import weiner

        decMDH = MetaDataHandler.NestedClassMDHandler(self.image.mdh)
        decMDH['Deconvolution.OriginalFile'] = self.image.filename
        decMDH['Deconvolution.Method'] = 'Wiener'

        im = numpy.zeros(self.image.data.shape[:3], 'f4')

        decView = View3D(im,
                         'Deconvolution Result',
                         mdh=decMDH,
                         parent=self.dsviewer)

        decView.wienerPanel = WienerDeconvolver(decView, self.image,
                                                decView.image)

        self.pinfo1 = aui.AuiPaneInfo().Name("wienerPanel").Left().Caption(
            'Wiener Filter').DestroyOnClose(True).CloseButton(False)
        #.MinimizeButton(True).MinimizeMode(aui.AUI_MINIMIZE_CAPT_SMART|aui.AUI_MINIMIZE_POS_RIGHT)#.CaptionVisible(False)
        decView._mgr.AddPane(decView.wienerPanel, self.pinfo1)
        decView._mgr.Update()

        self.dsviewer.decView = decView
Example No. 15
    def execute(self, namespace):
        from PYME.Analysis.points.astigmatism import astigTools
        from PYME.IO import unifiedIO
        import json

        inp = namespace[self.input_name]

        if 'mdh' not in dir(inp):
            raise RuntimeError('MapAstigZ needs metadata')

        if self.astigmatism_calibration_location == '':  # grab calibration from the metadata
            calibration_location = inp.mdh['Analysis.AstigmatismMapID']
        else:
            calibration_location = self.astigmatism_calibration_location

        s = unifiedIO.read(calibration_location)

        astig_calibrations = json.loads(s)

        mapped = tabular.MappingFilter(inp)

        z, zerr = astigTools.lookup_astig_z(mapped, astig_calibrations, self.rough_knot_spacing, plot=False)

        mapped.addColumn('astigmatic_z', z)
        mapped.addColumn('astigmatic_z_lookup_error', zerr)
        mapped.setMapping('z', 'astigmatic_z + z')

        mapped.mdh = MetaDataHandler.NestedClassMDHandler(inp.mdh)
        mapped.mdh['Analysis.astigmatism_calibration_used'] = calibration_location

        namespace[self.output_name] = mapped
Example No. 16
    def execute(self, namespace):
        from PYME.Analysis.points.traveling_salesperson import sort

        points = namespace[self.input]

        try:
            positions = np.stack([points['x_um'], points['y_um']], axis=1)
        except KeyError:
            # units don't matter for these calculations, but we want to preserve them on the other side
            positions = np.stack([points['x'], points['y']], axis=1) / 1e3

        start_index = 0 if not self.start_from_corner else np.argmin(positions.sum(axis=1))

        positions, ogd, final_distance = sort.tsp_sort(positions, start_index, self.epsilon, return_path_length=True)

        out = tabular.DictSource({'x_um': positions[:, 0],
                                     'y_um': positions[:, 1]})
        out.mdh = MetaDataHandler.NestedClassMDHandler()
        try:
            out.mdh.copyEntriesFrom(points.mdh)
        except AttributeError:
            pass
        out.mdh['TravelingSalesperson.Distance'] = final_distance
        out.mdh['TravelingSalesperson.OriginalDistance'] = ogd

        namespace[self.output] = out
Example No. 17
def test_GaussMultifitSR():
    """
    simple test to see if the multifit algorithm is working. We should detect roughly the same number of molecules
    as we simulated. This is only a loose test, and should pick up any critical reverse compatible breaks rather than
    actual fit performance.
    """
    from PYME.localization.FitFactories import GaussMultifitSR
    from PYME.IO import MetaDataHandler

    x, y, im = gen_image()

    mdh = MetaDataHandler.NestedClassMDHandler()
    mdh['Analysis.PSFSigma'] = 140.
    mdh['Analysis.ResidualMax'] = .5
    #mdh['Analysis.subtractBackground'] = False
    mdh['Camera.ReadNoise'] = 1.0
    mdh['Camera.NoiseFactor'] = 1.0
    mdh['Camera.ElectronsPerCount'] = 1.0
    mdh['Camera.TrueEMGain'] = 1.0
    mdh['voxelsize.x'] = .07
    mdh['voxelsize.y'] = .07

    ff = GaussMultifitSR.FitFactory(np.atleast_3d(im) - 2.0, mdh)
    res = ff.FindAndFit(1.8)

    nSim = len(x)
    nFound = len(res)

    print('nFound: %d, nSim: %d' % (nFound, nSim))
    assert (nFound > 0.5 * nSim and nFound < 2.0 * nSim)
Example No. 18
    def execute(self, namespace):

        inp = namespace[self.inputName]

        # generate LineProfileHandler from tables
        handler = LineProfileHandler()
        handler._load_profiles_from_list(inp)

        fit_class = profile_fitters.ensemble_fitters[self.fit_type]
        self.fitter = fit_class(handler)

        if self.hold_ensemble_parameter_constant:
            self.fitter.fit_profiles(self.ensemble_parameter_guess)
        else:
            self.fitter.ensemble_fit(self.ensemble_parameter_guess)

        res = tabular.RecArraySource(self.fitter.results)

        # propagate metadata, if present
        res.mdh = MetaDataHandler.NestedClassMDHandler(
            getattr(inp, 'mdh', None))

        res.mdh['EnsembleFitProfiles.FitType'] = self.fit_type
        res.mdh['EnsembleFitProfiles.EnsembleParameterGuess'] = self.ensemble_parameter_guess
        res.mdh['EnsembleFitProfiles.HoldEnsembleParamConstant'] = self.hold_ensemble_parameter_constant

        namespace[self.outputName] = res
Example No. 19
    def pushImagesDS(self, image):
        resultsFilename = _verifyResultsFilename(
            genResultFileName(image.seriesName))
        self.queueName = resultsFilename

        debugPrint('Results file = %s' % resultsFilename)

        self.resultsMdh = MetaDataHandler.NestedClassMDHandler(
            self.analysisMDH)
        self.resultsMdh['DataFileID'] = fileID.genDataSourceID(
            image.dataSource)

        mn = image.dataSource.moduleName
        #dsID = self.image.seriesName
        #if it's a buffered source, go back to underlying source
        if mn == 'BufferedDataSource':
            mn = image.dataSource.dataSource.moduleName

        self.tq.createQueue('DSTaskQueue',
                            self.queueName,
                            self.resultsMdh,
                            mn,
                            image.seriesName,
                            resultsFilename,
                            startAt=self.analysisMDH['Analysis.StartAt'])

        evts = image.dataSource.getEvents()
        if len(evts) > 0:
            self.tq.addQueueEvents(self.queueName, evts)

        debugPrint('Queue created')

        self.onImagesPushed.send(self)
Example No. 20
    def execute(self, namespace):
        #from PYME.localization.FitFactories import DumbellFitR
        from PYME.IO import MetaDataHandler
        img = namespace[self.inputImage]

        md = MetaDataHandler.NestedClassMDHandler()
        #set metadata entries needed for fitting to suitable defaults
        md['Camera.ADOffset'] = img.data[:, :, 0].min()
        md['Camera.TrueEMGain'] = 1.0
        md['Camera.ElectronsPerCount'] = 1.0
        md['Camera.ReadNoise'] = 1.0
        md['Camera.NoiseFactor'] = 1.0

        #copy across the entries from the real image, replacing the defaults
        #if necessary
        md.copyEntriesFrom(img.mdh)

        inp = namespace[self.inputPositions]

        res = np.zeros(len(inp['x']), dtype=[('r%d' % r, 'f4') for r in self.radii])

        ff_t = -1

        aggFunc = getattr(self, '_get_%s' % self.mode)

        ps = img.pixelSize
        print('pixel size: %s' % ps)
        for x, y, t, i in zip(inp['x'], inp['y'], inp['t'], range(len(inp['x']))):
            for r in self.radii:
                res[i]['r%d' % r] = aggFunc(img.data, np.round(x / ps), np.round(y / ps), t, r)

        res = tabular.RecArraySource(res)
        res.mdh = md

        namespace[self.outputName] = res
Example No. 21
def misfallA(r2,
             mdh,
             zCoeffs,
             ns=1.51,
             axialShift=None,
             colourRatio=None,
             beadSize=0):
    mdh = MetaDataHandler.NestedClassMDHandler(mdh)
    if axialShift is not None:
        mdh['Analysis.AxialShift'] = axialShift
    if colourRatio is not None:
        mdh['Analysis.ColourRatio'] = colourRatio
    voxelsize = mdh.voxelsize_nm
    voxelsize.z = 50.
    zs = 50. * np.arange(-30., 31)
    p1 = fourierHNA.GenZernikeDPSF(zs, 70, zCoeffs, ns=ns)
    interpolator.setModel('foo', p1, voxelsize)

    estimator.splines.clear()
    estimator.calibrate(interpolator, mdh)

    sp_all = [
        startEstROI(r2[j], mdh, interpolator, estimator,
                    mdh['Analysis.ColourRatio'], mdh['Analysis.AxialShift'])
        for j in range(len(r2))
    ]

    mf = np.array([(fitROI(r2[j], mdh, interpolator, sp_all[j],
                           mdh['Analysis.ColourRatio'],
                           mdh['Analysis.AxialShift'])[1]).sum()
                   for j in range(len(r2))])

    #return mf[mf < (median(mf)+ 2*std(mf))].mean()
    return np.sqrt(mf).mean()
Example No. 22
    def execute(self, namespace):
        from PYME.localization import traveling_salesperson
        from scipy.spatial import distance_matrix

        points = namespace[self.input]

        try:
            positions = np.stack([points['x_um'], points['y_um']], axis=1)
        except KeyError:
            # units don't matter for these calculations, but we want to preserve them on the other side
            positions = np.stack([points['x'], points['y']], axis=1) / 1e3

        distances = distance_matrix(positions, positions)

        route, best_distance, og_distance = traveling_salesperson.two_opt(
            distances, self.epsilon)

        # plot_path(positions, route)
        out = tabular.MappingFilter({
            'x_um': positions[:, 0][route],
            'y_um': positions[:, 1][route]
        })
        out.mdh = MetaDataHandler.NestedClassMDHandler()
        try:
            out.mdh.copyEntriesFrom(points.mdh)
        except AttributeError:
            pass
        out.mdh['TravelingSalesperson.Distance'] = best_distance
        out.mdh['TravelingSalesperson.OriginalDistance'] = og_distance

        namespace[self.output] = out
Example No. 23
    def execute(self, namespace):
        from PYME.Analysis.points.traveling_salesperson import sectioned_two_opt

        points = namespace[self.input]

        try:
            positions = np.stack([points['x_um'], points['y_um']], axis=1)
        except KeyError:
            positions = np.stack([points['x'], points['y']], axis=1) / 1e3

        final_route = sectioned_two_opt.tsp_chunk_two_opt_multiproc(positions, self.epsilon, self.points_per_chunk,
                                                                        self.n_processes)

        # note that we sorted the positions / sections once before, need to propagate that through before sorting
        out = tabular.DictSource({k: points[k][final_route] for k in points.keys()})
        out.mdh = MetaDataHandler.NestedClassMDHandler()
        try:
            out.mdh.copyEntriesFrom(points.mdh)
        except AttributeError:
            pass

        # use the already sorted output to get the final distance
        try:
            og_distance = np.sqrt((points['x_um'][1:] - points['x_um'][:-1]) ** 2 + (points['y_um'][1:] - points['y_um'][:-1]) ** 2).sum()
            final_distance = np.sqrt((out['x_um'][1:] - out['x_um'][:-1]) ** 2 + (out['y_um'][1:] - out['y_um'][:-1]) ** 2).sum()
        except KeyError:
            og_distance = np.sqrt((points['x'][1:] - points['x'][:-1]) ** 2 + (points['y'][1:] - points['y'][:-1]) ** 2).sum() / 1e3
            final_distance = np.sqrt((out['x'][1:] - out['x'][:-1]) ** 2 + (out['y'][1:] - out['y'][:-1]) ** 2).sum() / 1e3

        out.mdh['TravelingSalesperson.OriginalDistance'] = og_distance
        out.mdh['TravelingSalesperson.Distance'] = final_distance

        namespace[self.output] = out
Example No. 24
    def execute(self, namespace):
        data = namespace[self.input]

        n_rows = len(data)

        if n_rows < self.num_to_select:
            if self.strict:
                raise IndexError(
                    'Trying to select %d from data with only %d rows. To allow truncation, use strict=False'
                    % (self.num_to_select, n_rows))
            else:
                logger.info(
                    'RandomSubset: Truncating from %d to %d rows as data only has %d rows. To make this an error, use strict=True'
                    % (self.num_to_select, n_rows, n_rows))

        if self.strict and (self.num_to_select > 0.5 * n_rows):
            logger.warning(
                'RandomSubset: Selecting %d from %d rows will not be very random'
                % (self.num_to_select, n_rows))

        out = tabular.RandomSelectionFilter(data, num_Samples=min(n_rows, self.num_to_select))

        try:
            out.mdh = MetaDataHandler.DictMDHandler(data.mdh)
        except AttributeError:
            pass

        namespace[self.output] = out
Example No. 25
    def doStartLog(self):
        """Record pertinant information to metadata at start of acquisition.
        
        Loops through all registered sources of start metadata and adds their entries.
        
        See Also
        --------
        PYME.IO.MetaDataHandler
        """
        dt = datetime.datetime.now()

        self.dtStart = dt

        self.tStart = time.time()

        # create an in-memory metadata handler and populate this prior to copying data over to the spooler
        # metadata handler. This significantly improves performance if the spooler metadata handler has high latency
        # (as is the case for both the HDFMetaDataHandler and, especially, the QueueMetaDataHandler).
        mdt = MetaDataHandler.NestedClassMDHandler()
        mdt.setEntry('StartTime', self.tStart)

        #loop over all providers of metadata
        for mdgen in MetaDataHandler.provideStartMetadata:
            mdgen(mdt)

        self.md.copyEntriesFrom(mdt)
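
The provideStartMetadata hook looped over here is just a module-level list of callables, each taking the metadata handler; other snippets on this page register providers with MetaDataHandler.provideStartMetadata.append(...). A minimal sketch of a provider, using only that pattern:

from PYME.IO import MetaDataHandler

def _my_start_metadata(mdh):
    # hypothetical provider, called once when an acquisition starts
    mdh.setEntry('Experiment.Operator', 'jdoe')

MetaDataHandler.provideStartMetadata.append(_my_start_metadata)
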
Example No. 26
def genImageID(filename, guess=False):
    ext = os.path.splitext(filename)[1]
    #print ext

    try:
        if ext == '.h5':
            return genDataFileID(filename)
        elif ext == '.h5r':
            h5f = tables.open_file(filename)
            md = MetaDataHandler.HDFMDHandler(h5f)

            if 'Analysis.DataFileID' in md.getEntryNames():
                ret = md.getEntry('Analysis.DataFileID')
            elif guess:
                ret = guessH5RImageID(filename)
                #print ret
            else:
                ret = None
            #print guess, ret

            h5f.close()
            return ret
        else:
            return hashString32(filename)
    except:
        return hashString32(filename)
Example No. 27
    def pushImagesHDF(self, image):
        dataFilename = image.seriesName
        resultsFilename = _verifyResultsFilename(
            genResultFileName(image.seriesName))
        self.queueName = resultsFilename

        self.tq.createQueue('HDFTaskQueue',
                            self.queueName,
                            dataFilename=dataFilename,
                            resultsFilename=resultsFilename,
                            startAt='notYet')

        mdhQ = MetaDataHandler.QueueMDHandler(self.tq, self.queueName,
                                              self.analysisMDH)
        mdhQ['DataFileID'] = fileID.genDataSourceID(image.dataSource)

        #        evts = self.image.dataSource.getEvents()
        #        if len(evts) > 0:
        #            self.tq.addQueueEvents(self.image.seriesName, evts)

        self.resultsMdh = mdhQ

        self.tq.releaseTasks(self.queueName,
                             self.analysisMDH['Analysis.StartAt'])

        self.onImagesPushed.send(self)
Example No. 28
def getImageTags(filename):
    ext = os.path.splitext(filename)[1]
    #print ext

    tags = []

    try:
        if ext in ['.h5', '.h5r']:
            h5f = tables.open_file(filename)
            if 'Events' in dir(h5f.root):
                events = h5f.root.Events[:]

                evKeyNames = set()
                for e in events:
                    evKeyNames.add(e['EventName'])

                if b'ProtocolFocus' in evKeyNames:
                    tags.append('Z-Stack')

            md = MetaDataHandler.HDFMDHandler(h5f)

            if 'Protocol.Filename' in md.getEntryNames():
                tags.append('Protocol_%s' % md.getEntry('Protocol.Filename'))

            h5f.close()
    except:
        pass

    return tags
Example No. 29
    def OnMAquireOnePic(self, event):
        import numpy as np
        self.scope.frameWrangler.stop()
        ds2 = np.atleast_3d(self.scope.frameWrangler.currentFrame.reshape(self.scope.cam.GetPicWidth(),self.scope.cam.GetPicHeight()).copy())


        #metadata handling
        mdh = MetaDataHandler.NestedClassMDHandler()
        mdh.setEntry('StartTime', time.time())
        mdh.setEntry('AcquisitionType', 'SingleImage')

        #loop over all providers of metadata
        for mdgen in MetaDataHandler.provideStartMetadata:
            mdgen(mdh)

        im = dsviewer.ImageStack(data = ds2, mdh = mdh, titleStub='Unsaved Image')
        if not im.mode == 'graph':
            im.mode = 'lite'

        #print im.mode
        dvf = dsviewer.DSViewFrame(im, mode= im.mode, size=(500, 500))
        dvf.SetSize((500,500))
        dvf.Show()

        self.snapNum += 1

        self.scope.frameWrangler.Prepare(True)
        self.scope.frameWrangler.start()
Example No. 30
    def __init__(self, scope, **kwargs):
        """
        Create a stack settings object.
        
        NB - extra args map 1:1 to stack metadata entries. Start and end pos are ignored if ScanMode = 'Middle and Number'
        
        Parameters
        ----------
        scope
        ScanMode
        StartPos
        EndPos
        StepSize
        NumSlices
        ScanPiezo
        """
        #PreviewAquisator.__init__(self, chans, cam, shutters, None)
        self.scope = scope
        #self.log = _log
        self.mdh = MetaDataHandler.NestedClassMDHandler()
        #register as a provider of metadata
        MetaDataHandler.provideStartMetadata.append(self.ProvideStackMetadata)

        self._settings_changed = threading.Condition()

        d1 = dict(self.DEFAULTS)
        d1.update(kwargs)

        self.update(**d1)

        self.direction = self.FORWARDS

        from PYME.Acquire import webui
        # add webui endpoints (if running under webui)
        webui.add_endpoints(self, '/stack_settings')
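
The docstring above notes that the extra keyword arguments map 1:1 onto stack metadata entries. A hypothetical construction sketch (the class name StackSettings is assumed, and only kwarg names listed in the docstring are used):

stack_settings = StackSettings(scope,
                               ScanMode='Middle and Number',  # StartPos/EndPos ignored in this mode
                               StepSize=0.2,
                               NumSlices=101,
                               ScanPiezo='z')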