Ejemplo n.º 1
0
 def launch_cluster_analysis(self):
     """Launch cluster-based localization analysis for the currently
     spooled series.

     Best-effort: any failure is logged (with traceback) rather than
     propagated, so acquisition is never interrupted by an analysis
     launch error.
     """
     from PYME.cluster import HTTPRulePusher

     seriesName = self.spooler.getURL()
     try:
         HTTPRulePusher.launch_localize(self.scope.analysisSettings.analysisMDH, seriesName)
     except Exception:
         # was a bare `except:` — narrow so SystemExit/KeyboardInterrupt
         # are not swallowed; still log the full traceback
         logger.exception('Error launching analysis for %s' % seriesName)
Ejemplo n.º 2
0
    def OnSeriesComplete(self, eventsFilename=None, zstepsFilename=None, pushTaskToCluster=False):
        """Called when the series is finished (i.e. we have seen the events
        file).

        Parameters
        ----------
        eventsFilename : str, optional
            Path to an events JSON file. Each entry is a
            (name, description, timestamp) triple which is replayed into
            the spooler's event log.
        zstepsFilename : str, optional
            Path to a JSON file with 'PIFOC_positions' and
            'Start_Frame_eachZ' lists, used to synthesize StartAq and
            ProtocolFocus pseudo-events with estimated timestamps.
        pushTaskToCluster : bool, optional
            If True, write (hard-coded) astigmatic GPU fit settings into
            the metadata and launch localization analysis on the cluster.
        """

        if (eventsFilename is not None) and os.path.exists(eventsFilename):
            # Update event Log with events.json
            with open(eventsFilename, 'r') as f:
                events = json.load(f)

            for evt in events:
                name, descr, timestamp = evt
                self.spooler.evtLogger.logEvent(eventName=name, eventDescr=descr, timestamp=float(timestamp))

        if (zstepsFilename is not None) and os.path.exists(zstepsFilename):
            #create pseudo events based on our zstep information
            with open(zstepsFilename, 'r') as f:
                zsteps = json.load(f)

            positions = zsteps['PIFOC_positions']
            startFrames = zsteps['Start_Frame_eachZ']

            startTime = self.mdh.getOrDefault('StartTime', 0)
            cycleTime = self.mdh.getOrDefault('Camera.CycleTime', 0.01) #use a default frame length of 10 ms. Not super critical

            for pos, fr in zip(positions, startFrames):
                # estimate when this frame was acquired from frame number x cycle time
                fakeTime = startTime + cycleTime*fr
                self.spooler.evtLogger.logEvent(eventName='StartAq', eventDescr='%d' % fr, timestamp=fakeTime)
                self.spooler.evtLogger.logEvent(eventName='ProtocolFocus', eventDescr='%d, %3.3f' % (fr, pos),
                                                timestamp=fakeTime)

        self.spooler.StopSpool()
        self.spooler.FlushBuffer()

        if pushTaskToCluster:
            # NOTE(review): analysis settings are hard-coded here (astigmatic
            # GPU fitting) — presumably tuned for one fixed acquisition setup;
            # confirm before reusing elsewhere.
            self.mdh.setEntry('Analysis.BGRange', [-32, 0])
            self.mdh.setEntry('Analysis.DebounceRadius', 4)
            self.mdh.setEntry('Analysis.DetectionThreshold', 0.75)
            self.mdh.setEntry('Analysis.FiducialThreshold', 1.8)
            self.mdh.setEntry('Analysis.FitModule', 'AstigGaussGPUFitFR')
            self.mdh.setEntry('Analysis.PCTBackground', 0.25)
            self.mdh.setEntry('Analysis.ROISize', 7.5)
            self.mdh.setEntry('Analysis.StartAt', 32)
            self.mdh.setEntry('Analysis.TrackFiducials', False)
            self.mdh.setEntry('Analysis.subtractBackground', True)
            self.mdh.setEntry('Analysis.GPUPCTBackground', True)
            cluster_filename = 'pyme-cluster://%s/%s' % (clusterIO.local_serverfilter, self.spooler.seriesName)
            HTTPRulePusher.launch_localize(analysisMDH=self.mdh, seriesName=cluster_filename)

        #remove the metadata generator
        MetaDataHandler.provideStartMetadata.remove(self.metadataSource)
Ejemplo n.º 3
0
    def pushImagesCluster(self, image):
        """Queue *image* for localization analysis on the cluster.

        Derives a results filename from the image's filename, builds a
        results metadata handler (a copy of the analysis settings plus a
        data-source ID), creates an HTTPRulePusher rule for the series,
        records the queue name / results filename on self, and finally
        fires the onImagesPushed signal.
        """
        from PYME.cluster import HTTPRulePusher

        results_fn = _verifyClusterResultsFilename(
            genClusterResultFileName(image.filename))
        logging.debug('Results file: ' + results_fn)

        # copy the analysis settings and tag with an ID for the data source
        mdh = MetaDataHandler.NestedClassMDHandler(self.analysisMDH)
        mdh['DataFileID'] = fileID.genDataSourceID(image.dataSource)
        self.resultsMdh = mdh

        self.pusher = HTTPRulePusher.HTTPRulePusher(
            dataSourceID=image.filename,
            metadata=self.resultsMdh,
            resultsFilename=results_fn)

        self.queueName = self.pusher.queueID
        self.results_filename = results_fn

        debugPrint('Queue created')

        self.onImagesPushed.send(self)
Ejemplo n.º 4
0
def localize(request, analysisModule='LatGaussFitFR'):
    """Django view: launch localization analysis for one or more series.

    Reads the analysis settings form from ``request.POST``, builds an
    analysis metadata handler from the cleaned form data, then attempts to
    launch analysis for every series listed in the POST ``series`` field.
    Failed launches are retried up to 3 times with a 20 s pause between
    rounds.

    Parameters
    ----------
    request : django.http.HttpRequest
        POST request carrying the settings form and a 'series' list.
    analysisModule : str, optional
        Fit module to use if the form does not specify one.

    Returns
    -------
    HttpResponseRedirect
        Redirect to the queue status page on success.

    Raises
    ------
    RuntimeError
        If any series could not be launched after all retries.
    """
    from PYME.IO import MetaDataHandler
    import copy
    import time
    from PYME import config
    USE_RULES = config.get('PYMERuleserver-use', True)

    analysisModule = request.POST.get('Analysis.FitModule', analysisModule)

    f = settings_form(analysisModule)(request.POST)

    # NOTE(review): the is_valid() result is ignored — invalid fields are
    # simply absent from cleaned_data; confirm this is intentional.
    f.is_valid()

    f.cleaned_data['Analysis.FitModule'] = analysisModule

    # NB - any metadata entries given here will override the series metadata later: pass analysis settings only
    analysisMDH = MetaDataHandler.NestedClassMDHandler()
    analysisMDH.update(f.cleaned_data)

    remaining_series = request.POST.getlist('series', [])

    nSeries = len(remaining_series)

    nAttempts = 0

    while len(remaining_series) > 0 and nAttempts < 3:
        nAttempts += 1

        seriesToLaunch = copy.copy(remaining_series)
        remaining_series = []

        for seriesName in seriesToLaunch:
            try:
                if USE_RULES:
                    HTTPRulePusher.launch_localize(analysisMDH, seriesName)
                else:
                    HTTPTaskPusher.launch_localize(analysisMDH, seriesName)
            except Exception:
                # was a bare `except:` — don't swallow SystemExit /
                # KeyboardInterrupt; log the traceback and queue a retry
                logger.exception('Error launching analysis for %s' %
                                 seriesName)

                remaining_series.append(seriesName)

        if len(remaining_series) > 0:
            # use the module logger for consistency with the exception log above
            logger.debug(
                '%d series were not launched correctly, waiting 20s and retrying'
                % len(remaining_series))
            time.sleep(20)

    nFailed = len(remaining_series)
    if nFailed > 0:
        raise RuntimeError('Failed to push %d of %d series' %
                           (nFailed, nSeries))

    return HttpResponseRedirect('/status/queues/')