    def _checkOutputExists(self, fn):
        if self.spoolType == 'Cluster':
            from PYME.Acquire import HTTPSpooler
            # special case for HTTP spooling: make sure 000\series.pcs -> 000/series.pcs
            pyme_cluster = self.dirname + '/' + fn.replace('\\', '/')
            logger.debug('Looking for %s (.pcs or .h5) on cluster' % pyme_cluster)
            return HTTPSpooler.exists(pyme_cluster + '.pcs') or HTTPSpooler.exists(pyme_cluster + '.h5')
            #return (fn + '.h5/') in HTTPSpooler.clusterIO.listdir(self.dirname)
        else:
            local_h5 = os.sep.join([self.dirname, fn + '.h5'])
            logger.debug('Looking for %s on local machine' % local_h5)
            return os.path.exists(local_h5)
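
For reference, the cluster branch only normalizes path separators before probing for either extension; a minimal self-contained sketch of that normalization (using a hypothetical helper name, not part of the snippet):

import posixpath

def _cluster_uri(dirname, fn):
    # hypothetical helper mirroring the normalization above:
    # Windows-style '000\series' under 'data' -> 'data/000/series'
    return posixpath.join(dirname, fn.replace('\\', '/'))

assert _cluster_uri('data', '000\\series') == 'data/000/series'
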
    def run(self, filename=None, nFrames = 2000, interval=0, compression=False):
        if filename is None:
            filename = 'test_%3.1f' % time.time()

        self.imgSource = ImageFrameSource()
        self.metadataSource = MDSource(self.mdh)
        MetaDataHandler.provideStartMetadata.append(self.metadataSource)

        #generate the spooler
        if compression:
            # default compression settings (compressed PZF)
            self.spooler = HTTPSpooler.Spooler(filename, self.onFrame, frameShape = None, serverfilter=self.serverfilter)
        else:
            # explicitly disable compression and quantization (raw PZF)
            self.spooler = HTTPSpooler.Spooler(filename, self.onFrame,
                                               frameShape = None, serverfilter=self.serverfilter,
                                               compressionSettings={'compression': HTTPSpooler.PZFFormat.DATA_COMP_RAW,
                                                                    'quantization':HTTPSpooler.PZFFormat.DATA_QUANT_NONE})
        
        try:
            #spool our data
            self.spooler.StartSpool()
    
            print(self.spooler.seriesName)
    
            startTime = time.time()
    
            self._spoolData(nFrames, interval)
    
            #wait until we've sent everything
            #this is a bit of a hack
            time.sleep(.1)
            while not self.spooler.finished():
                time.sleep(.1)
    
            endTime = time.time()
            duration = endTime - startTime
    
            print('######################################')
            print('%d frames spooled in %f seconds' % (nFrames, duration))
            print('%3.0f frames per second' % (nFrames/duration))
            print('Avg throughput: %3.0f MB/s' % (nFrames*self.testData.nbytes/(1e6*duration)))
    
            self.spooler.StopSpool()
        finally:
            self.spooler.cleanup()
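
The statistics printed at the end of run() are simple arithmetic; a self-contained sketch of the same calculation, keeping the decimal (1e6-byte) megabytes of the original:

import numpy as np

def report_throughput(n_frames, frame_nbytes, duration_s):
    # reproduce the figures printed by run(): frame rate and
    # average throughput in MB/s
    print('%d frames spooled in %f seconds' % (n_frames, duration_s))
    print('%3.0f frames per second' % (n_frames / duration_s))
    print('Avg throughput: %3.0f MB/s' % (n_frames * frame_nbytes / (1e6 * duration_s)))

# example: 2000 frames of 2048x2048 uint16 spooled in 10 s
report_throughput(2000, np.zeros((2048, 2048), np.uint16).nbytes, 10.0)
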
    def OnNewSeries(self, metadataFilename, comp_settings=None):
        """Called when a new series is detected, i.e. when the
        <seriesname>.json file appears.
        """
        if comp_settings is None:
            comp_settings = {}
        # Make sure that the json file has finished writing
        success = _wait_for_file(metadataFilename, self.timeout)
        if not success:
            raise UserWarning('metadata (json) file is taking too long to finish writing')


        #create an empty metadatahandler
        self.mdh = MetaDataHandler.NestedClassMDHandler(MetaData.BareBones)

        #load metadata from file and insert into our metadata handler
        with open(metadataFilename, 'r') as f:
            mdd = json.load(f)
            self.mdh.update(mdd)

        #determine a filename on the cluster from our local filename
        #TODO - make this more complex to generate suitable directory structures
        filename = unifiedIO.verbose_fix_name(os.path.splitext(metadataFilename)[0])
        
        dirname, seriesname = os.path.split(filename)
        
        #Strip the drive prefix (e.g. G:\) from the filename, as it caused connection problems with some cluster nodes
        dirname = dirname[dirname.find('\\') + 1:]
        
        cluster_filename = os.path.join(dirname, '%03d' % (self.n_spooled // 1000), seriesname)
        
        #create virtual frame and metadata sources
        self.imgSource = ImageFrameSource()
        self.metadataSource = MDSource(self.mdh)
        MetaDataHandler.provideStartMetadata.append(self.metadataSource)

        #generate the spooler
        comp_settings.update({'quantizationOffset': self.mdh.getOrDefault('Camera.ADOffset', 0)})
        self.spooler = HTTPSpooler.Spooler(cluster_filename, self.imgSource.onFrame, frameShape=None,
                                           compressionSettings=comp_settings)

        self.n_spooled += 1
        #spool our data
        self.spooler.StartSpool()
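
The _wait_for_file helper is referenced above but not shown; a plausible polling implementation (an assumption, not the project's actual code) waits until the file exists and its size has stopped changing, or gives up after the timeout:

import os
import time

def _wait_for_file(filename, timeout, poll_interval=0.1):
    # hypothetical stand-in for the helper used above: return True once
    # `filename` exists and its size has been stable for one poll
    # interval, False if `timeout` seconds elapse first
    deadline = time.time() + timeout
    last_size = -1
    while time.time() < deadline:
        if os.path.exists(filename):
            size = os.path.getsize(filename)
            if size == last_size and size > 0:
                return True
            last_size = size
        time.sleep(poll_interval)
    return False
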
def ExportImageToCluster(image, filename, progCallback=None):
    """Exports the given image to a file on the cluster
    
    Parameters
    ----------
    image : PYME.IO.image.ImageStack object
        the source image
    filename : string
        the filename on the cluster
        
    """

    #create virtual frame and metadata sources
    imgSource = ImageFrameSource()
    mds = MDSource(image.mdh)
    MetaDataHandler.provideStartMetadata.append(mds)

    if progCallback is not None:
        imgSource.spoolProgress.connect(progCallback)

    #queueName = getRelFilename(self.dirname + filename + '.h5')

    #generate the spooler
    spooler = HTTPSpooler.Spooler(filename,
                                  imgSource.onFrame,
                                  frameShape=image.data.shape[:2])

    #spool our data
    spooler.StartSpool()
    imgSource.spoolData(image.data)
    spooler.FlushBuffer()
    spooler.StopSpool()

    #remove the metadata generator
    MetaDataHandler.provideStartMetadata.remove(mds)
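
A hedged usage sketch for ExportImageToCluster; it assumes ImageStack accepts an array via the `data` keyword, and makes no assumption about the exact signature of the spoolProgress signal (neither is shown in the snippet):

import numpy as np
from PYME.IO.image import ImageStack

def report(*args, **kwargs):
    # progress callback; accept anything, since the signal's exact
    # signature is not shown in the snippet
    print('progress:', args, kwargs)

im = ImageStack(data=np.zeros((256, 256, 10), 'uint16'))
ExportImageToCluster(im, 'exports/test_series', progCallback=report)
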
Example 5
    def OnBStartSpoolButton(self, event=None, stack=False):
        '''GUI callback to start spooling.
        
        NB: this is also called programmatically by the start stack button.'''

        #fn = wx.FileSelector('Save spooled data as ...', default_extension='.log',wildcard='*.log')
        #if not fn == '': #if the user cancelled
        #    self.spooler = Spooler.Spooler(self.scope, fn, self.scope.pa, self)
        #    self.bStartSpool.Enable(False)
        #    self.bStopSpooling.Enable(True)
        #    self.stSpoolingTo.Enable(True)
        #    self.stNImages.Enable(True)
        #    self.stSpoolingTo.SetLabel('Spooling to ' + fn)
        #    self.stNImages.SetLabel('0 images spooled in 0 minutes')

        fn = self.tcSpoolFile.GetValue()

        if fn == '':  #sanity checking
            wx.MessageBox('Please enter a series name', 'No series name given',
                          wx.OK)
            return  #bail

        if not os.path.exists(self.dirname):
            os.makedirs(self.dirname)

        if not self.dirname[-1] == os.sep:
            self.dirname += os.sep

        if (fn + '.h5') in os.listdir(
                self.dirname):  #check to see if data with the same name exists
            ans = wx.MessageBox('A series with the same name already exists',
                                'Error', wx.OK)
            #overwriting doesn't work ... so just bail
            #increment the series counter first, though, so hopefully we don't get the same error on the next try
            self.seriesCounter += 1
            self.seriesName = self._GenSeriesName()
            self.tcSpoolFile.SetValue(self.seriesName)
            #if ans == wx.NO:
            return  #bail

        if self.cbCompress.GetValue():
            compLevel = 2
        else:
            compLevel = 0

        if stack:
            protocol = self.protocolZ
            print(protocol)
        else:
            protocol = self.protocol

        # NB: check the protocol actually selected above (stack or normal)
        if not preflight.ShowPreflightResults(self,
                                              protocol.PreflightCheck()):
            return  #bail if we failed the pre flight check, and the user didn't choose to continue

        spoolType = self.rbQueue.GetStringSelection()
        #if self.cbQueue.GetValue():
        if spoolType == 'Queue':
            self.queueName = getRelFilename(self.dirname + fn + '.h5')
            self.spooler = QueueSpooler.Spooler(self.scope,
                                                self.queueName,
                                                self.scope.pa,
                                                protocol,
                                                self,
                                                complevel=compLevel)
            self.bAnalyse.Enable(True)
        elif spoolType == 'HTTP':
            self.queueName = self.dirname + fn + '.h5'
            self.spooler = HTTPSpooler.Spooler(self.scope,
                                               self.queueName,
                                               self.scope.pa,
                                               protocol,
                                               self,
                                               complevel=compLevel)
            self.bAnalyse.Enable(True)
        else:
            self.spooler = HDFSpooler.Spooler(self.scope,
                                              self.dirname + fn + '.h5',
                                              self.scope.pa,
                                              protocol,
                                              self,
                                              complevel=compLevel)

        #if stack:
        #    self.spooler.md.setEntry('ZStack', True)

        self.bStartSpool.Enable(False)
        self.bStartStack.Enable(False)
        self.bStopSpooling.Enable(True)
        self.stSpoolingTo.Enable(True)
        self.stNImages.Enable(True)
        self.stSpoolingTo.SetLabel('Spooling to ' + fn)
        self.stNImages.SetLabel('0 images spooled in 0 minutes')

        if sampInf:
            sampleInformation.getSampleData(self, self.spooler.md)
Example 6
    def StartSpooling(self,
                      fn=None,
                      stack=None,
                      compLevel=None,
                      zDwellTime=None,
                      doPreflightCheck=True,
                      maxFrames=sys.maxsize,
                      pzf_compression_settings=None,
                      cluster_h5=None):
        """Start spooling
        """

        # these settings used to be managed by the GUI but are now managed by the
        # controller; still allow them to be passed in, defaulting to our internal values
        compLevel = self.hdf_compression_level if compLevel is None else compLevel
        pzf_compression_settings = self.pzf_compression_settings if pzf_compression_settings is None else pzf_compression_settings
        stack = self.z_stepped if stack is None else stack
        cluster_h5 = self.cluster_h5 if cluster_h5 is None else cluster_h5
        fn = self.seriesName if fn in ['', None] else fn
        zDwellTime = self.z_dwell if zDwellTime is None else zDwellTime

        #make directories as needed
        if not (self.spoolType == 'Cluster'):
            dirname = os.path.split(self._get_queue_name(fn))[0]
            if not os.path.exists(dirname):
                os.makedirs(dirname)

        if self._checkOutputExists(
                fn):  #check to see if data with the same name exists
            self.seriesCounter += 1
            self.seriesName = self._GenSeriesName()

            raise IOError('Output file already exists')

        if stack:
            protocol = self.protocolZ
            if zDwellTime is not None:
                protocol.dwellTime = zDwellTime
            print(protocol)
        else:
            protocol = self.protocol

        if doPreflightCheck and not preflight.ShowPreflightResults(
                None, protocol.PreflightCheck()):
            return  #bail if we failed the pre flight check, and the user didn't choose to continue

        #fix timing when using fake camera
        if self.scope.cam.__class__.__name__ == 'FakeCamera':
            fakeCycleTime = self.scope.cam.GetIntegTime()
        else:
            fakeCycleTime = None

        frameShape = (self.scope.cam.GetPicWidth(),
                      self.scope.cam.GetPicHeight())

        if self.spoolType == 'Queue':
            from PYME.Acquire import QueueSpooler
            self.queueName = getRelFilename(self._get_queue_name(fn))
            self.spooler = QueueSpooler.Spooler(
                self.queueName,
                self.scope.frameWrangler.onFrame,
                frameShape=frameShape,
                protocol=protocol,
                guiUpdateCallback=self._ProgressUpate,
                complevel=compLevel,
                fakeCamCycleTime=fakeCycleTime,
                maxFrames=maxFrames)
        elif self.spoolType == 'Cluster':
            from PYME.Acquire import HTTPSpooler
            self.queueName = self._get_queue_name(fn, pcs=(not cluster_h5))
            self.spooler = HTTPSpooler.Spooler(
                self.queueName,
                self.scope.frameWrangler.onFrame,
                frameShape=frameShape,
                protocol=protocol,
                guiUpdateCallback=self._ProgressUpate,
                complevel=compLevel,
                fakeCamCycleTime=fakeCycleTime,
                maxFrames=maxFrames,
                compressionSettings=pzf_compression_settings,
                aggregate_h5=cluster_h5)

        else:
            from PYME.Acquire import HDFSpooler
            self.spooler = HDFSpooler.Spooler(
                self._get_queue_name(fn),
                self.scope.frameWrangler.onFrame,
                frameShape=frameShape,
                protocol=protocol,
                guiUpdateCallback=self._ProgressUpate,
                complevel=compLevel,
                fakeCamCycleTime=fakeCycleTime,
                maxFrames=maxFrames)

        #TODO - sample info is probably better handled with a metadata hook
        #if sampInf:
        #    try:
        #        sampleInformation.getSampleData(self, self.spooler.md)
        #    except:
        #        #the connection to the database will timeout if not present
        #        #FIXME: catch the right exception (or delegate handling to sampleInformation module)
        #        pass

        self.spooler.onSpoolStop.connect(self.SpoolStopped)
        self.spooler.StartSpool()

        self.onSpoolStart.send(self)

        #return a function which can be called to indicate if we are done
        return lambda: not self.spooler.spoolOn
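
The returned callable gives callers a poll-style completion check; a hedged usage sketch, where spool_controller is a placeholder for an instance of this class (not a name from the snippet):

import time

done = spool_controller.StartSpooling(fn='series_001', maxFrames=1000)
while not done():          # True once spoolOn goes False
    time.sleep(0.5)
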
    def start_spooling(self, fn=None, settings=None, preflight_mode='interactive'):
        """

        Parameters
        ----------
        fn : str, optional
            fn can be hardcoded here; otherwise we defer to the seriesName
            property, which will create one if need be.
        settings : dict
            keys should be `SpoolController` attributes or properties with
            setters. Not all keys must be present, and example keys include:
                method : str
                    One of 'File', 'Cluster', or 'Queue' (py2 only)
                hdf_compression_level: int
                    zlib compression level that pytables should use (spool to
                    file and queue)
                z_stepped : bool
                    toggle z-stepping during acquisition
                z_dwell : int
                    number of frames to acquire at each z level (predicated on
                    `SpoolController.z_stepped` being True)
                cluster_h5 : bool
                    Toggle spooling to single h5 file on cluster rather than pzf
                    file per frame. Only applicable to 'Cluster' `method` and
                    preferred for PYMEClusterOfOne.
                pzf_compression_settings : dict
                    Compression settings relevant for 'Cluster' `method` if
                    `cluster_h5` is False. See HTTPSpooler.defaultCompSettings.
                protocol_name : str
                    Note that passing the protocol name will force a (re)load of
                    the protocol file (even if it is already selected).
                max_frames : int, optional
                    point at which to end the series automatically, by default
                    sys.maxsize
                subdirectory : str, optional
                    Directory within the currently set directory to spool this
                    series to. It will be created if it doesn't already exist.
                extra_metadata : dict, optional
                    metadata to supplement this series, for entries known prior
                    to acquisition which do not have start-metadata hooks
        preflight_mode : str (default='interactive')
            What to do when the preflight check fails. Options are 'interactive'
            (display a dialog and prompt the user), 'warn' (log a warning and
            continue), 'abort' (log an error and abort), and 'skip' (skip the
            check entirely). The first is suitable for interactive acquisition;
            one of the latter modes is likely better for automated spooling via
            the action manager.
        """
        # these settings used to be managed by the GUI but are now managed by
        # the controller; still allow them to be passed in, defaulting to internals
        if settings is None:
            settings = {}

        fn = self.seriesName if fn in ['', None] else fn
        stack = settings.get('z_stepped', self.z_stepped)
        compLevel = settings.get('hdf_compression_level', self.hdf_compression_level)
        pzf_compression_settings = settings.get('pzf_compression_settings', self.pzf_compression_settings)
        cluster_h5 = settings.get('cluster_h5', self.cluster_h5)
        maxFrames = settings.get('max_frames', sys.maxsize)
        stack_settings = settings.get('stack_settings', None)
        
        
        # try stack settings for z_dwell, then acquisition settings.
        # precedence is settings > stack_settings > self.z_dwell
        # The reasoning for allowing the dwell time to be set in either the spooling or stack settings is to allow
        # API users to choose which is most coherent for their use case (it would seem logical to put dwell time with
        # the other stack settings, but this becomes problematic when sharing stack settings across modalities - e.g.
        # PALM/STORM and widefield stacks which are likely to share most of the stack settings but have greatly different
        # z dwell times). PYMEAcquire specifies it in the spooling/series settings by default to allow shared usage
        # between modalities.
        if stack_settings:
            if isinstance(stack_settings, dict):
                z_dwell = stack_settings.get('DwellFrames', self.z_dwell)
            else:
                # have a StackSettings object
                # TODO - fix this to be a bit more sane and not use private attributes etc ...
                z_dwell = stack_settings._dwell_frames
                # z_dwell defaults to -1 (meaning 'ignore') in StackSettings objects if a value is not
                # explicitly provided. In this case, use our internal value instead. The reason for the 'ignore'
                # special value is to allow the same StackSettings object to be used for widefield stacks and
                # localization series (where sharing everything except dwell time makes sense).
                if z_dwell < 1:
                    z_dwell = self.z_dwell
        else:
            z_dwell = self.z_dwell
        
        z_dwell = settings.get('z_dwell', z_dwell)
        
        if (stack_settings is not None) and (not isinstance(stack_settings, stackSettings.StackSettings)):
            # let us pass stack settings as a dict, constructing a StackSettings instance as needed
            stack_settings = stackSettings.StackSettings(**dict(stack_settings))
        
        protocol_name = settings.get('protocol_name')
        if protocol_name is None:
            protocol, protocol_z = self.protocol, self.protocolZ
        else:
            pmod = prot.get_protocol(protocol_name)
            protocol, protocol_z = pmod.PROTOCOL, pmod.PROTOCOL_STACK

        subdirectory  = settings.get('subdirectory', None)
        # make directories as needed, makedirs(dir, exist_ok=True) once py2 support is dropped
        if (self.spoolType != 'Cluster') and (not os.path.exists(self.get_dirname(subdirectory))):
            os.makedirs(self.get_dirname(subdirectory))

        if self._checkOutputExists(fn): #check to see if data with the same name exists
            self.seriesCounter +=1
            self.seriesName = self._GenSeriesName()
            
            raise IOError('A series with the same name already exists')

        if stack:
            protocol = protocol_z
            protocol.dwellTime = z_dwell
            #print(protocol)

        if (preflight_mode != 'skip') and not preflight.ShowPreflightResults(protocol.PreflightCheck(), preflight_mode):
            return #bail if we failed the pre flight check, and the user didn't choose to continue
            
          
        #fix timing when using fake camera
        if self.scope.cam.__class__.__name__ == 'FakeCamera':
            fakeCycleTime = self.scope.cam.GetIntegTime()
        else:
            fakeCycleTime = None
            
        frameShape = (self.scope.cam.GetPicWidth(), self.scope.cam.GetPicHeight())
        
        if self.spoolType == 'Queue':
            from PYME.Acquire import QueueSpooler
            self.queueName = getRelFilename(self._get_queue_name(fn, subdirectory=subdirectory))
            self.spooler = QueueSpooler.Spooler(self.queueName, self.scope.frameWrangler.onFrame, 
                                                frameShape = frameShape, protocol=protocol, 
                                                guiUpdateCallback=self._ProgressUpate, complevel=compLevel, 
                                                fakeCamCycleTime=fakeCycleTime, maxFrames=maxFrames, stack_settings=stack_settings)
        elif self.spoolType == 'Cluster':
            from PYME.Acquire import HTTPSpooler
            self.queueName = self._get_queue_name(fn, pcs=(not cluster_h5), 
                                                  subdirectory=subdirectory)
            self.spooler = HTTPSpooler.Spooler(self.queueName, self.scope.frameWrangler.onFrame,
                                               frameShape = frameShape, protocol=protocol,
                                               guiUpdateCallback=self._ProgressUpate,
                                               fakeCamCycleTime=fakeCycleTime, maxFrames=maxFrames,
                                               compressionSettings=pzf_compression_settings, aggregate_h5=cluster_h5, stack_settings=stack_settings)
           
        else:
            from PYME.Acquire import HDFSpooler
            self.spooler = HDFSpooler.Spooler(self._get_queue_name(fn, subdirectory=subdirectory),
                                              self.scope.frameWrangler.onFrame,
                                              frameShape = frameShape, protocol=protocol, 
                                              guiUpdateCallback=self._ProgressUpate, complevel=compLevel, 
                                              fakeCamCycleTime=fakeCycleTime, maxFrames=maxFrames, stack_settings=stack_settings)

        #TODO - sample info is probably better handled with a metadata hook
        #if sampInf:
        #    try:
        #        sampleInformation.getSampleData(self, self.spooler.md)
        #    except:
        #        #the connection to the database will timeout if not present
        #        #FIXME: catch the right exception (or delegate handling to sampleInformation module)
        #        pass
        extra_metadata = settings.get('extra_metadata')
        if extra_metadata is not None:
            self.spooler.md.mergeEntriesFrom(MetaDataHandler.DictMDHandler(extra_metadata))

        # stop the frameWrangler before we start spooling
        # this serves to ensure that a) we don't accidentally spool frames which were in the camera buffer when we hit start
        # and b) we get a nice clean timestamp for when the actual frames start (after any protocol init tasks)
        # it might also slightly improve performance.
        self.scope.frameWrangler.stop()
        
        try:
            self.spooler.onSpoolStop.connect(self.SpoolStopped)
            self.spooler.StartSpool()
        except:
            # abort cleanly, then re-raise so callers see the original error
            self.spooler.abort()
            raise

        # restart frame wrangler
        self.scope.frameWrangler.Prepare()
        self.scope.frameWrangler.start()
        
        self.onSpoolStart.send(self)
        
        #return a function which can be called to indicate if we are done
        return lambda : self.spooler.spool_complete
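
A hedged usage sketch showing the settings dict in action; spool_controller is a placeholder for an instance of this class, the keys come from the docstring above, and the metadata entry is purely illustrative:

settings = {
    'z_stepped': True,
    'z_dwell': 50,                 # frames per z level
    'cluster_h5': True,            # single h5 on cluster (PYMEClusterOfOne)
    'max_frames': 10000,
    'subdirectory': 'run_01',
    'extra_metadata': {'Sample.Label': 'test'},   # hypothetical metadata key
}
done = spool_controller.start_spooling(settings=settings, preflight_mode='warn')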