Example #1
0
    def __init__(self,
                 spooler,
                 seriesName,
                 analysisMetadata,
                 resultsFilename=None,
                 startAt=0,
                 serverfilter=clusterIO.local_serverfilter,
                 **kwargs):
        """
        Build a localisation rule for a series that is currently being spooled.

        Parameters
        ----------
        spooler : object
            Active spooler for the series; its `md` attribute supplies the
            series metadata.
        seriesName : str
            Name of the series; used to generate a default results filename.
        analysisMetadata : dictionary-like
            Analysis settings; entries here override the series metadata.
        resultsFilename : str, optional
            Destination for results. Generated from `seriesName` if None.
        startAt : int, optional
            Frame number to start analysis at (passed through to `_setup`).
        serverfilter : str, optional
            Cluster server filter (passed through to `_setup`).
        **kwargs
            Forwarded to `Rule.__init__`.
        """
        # TODO - reduce duplication of `LocalisationRule.__init__()` and `LocalisationRule._setup()`
        from PYME.IO import MetaDataHandler
        from PYME.Analysis import MetaData
        from PYME.IO.FileUtils.nameUtils import genClusterResultFileName

        self.spooler = spooler

        if resultsFilename is None:
            resultsFilename = genClusterResultFileName(seriesName)

        resultsFilename = verify_cluster_results_filename(resultsFilename)
        logger.info('Results file: ' + resultsFilename)

        resultsMdh = MetaDataHandler.DictMDHandler()
        # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
        resultsMdh.update(self.spooler.md)
        resultsMdh.update(analysisMetadata)
        # default the laser-on frame to 'Analysis.StartAt' (itself defaulting to 0)
        resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault(
            'EstimatedLaserOnFrameNo',
            resultsMdh.getOrDefault('Analysis.StartAt', 0))
        MetaData.fixEMGain(resultsMdh)

        self._setup(seriesName, resultsMdh, resultsFilename, startAt,
                    serverfilter)

        Rule.__init__(self, **kwargs)
Example #2
0
    def __init__(self,
                 seriesName,
                 analysisMetadata,
                 resultsFilename=None,
                 startAt=0,
                 dataSourceModule=None,
                 serverfilter=clusterIO.local_serverfilter,
                 **kwargs):
        """
        Build a localisation rule for a series already stored on the cluster.

        Parameters
        ----------
        seriesName : str
            Cluster URI of the series; its metadata is read from
            `<seriesName>/metadata.json`.
        analysisMetadata : dictionary-like
            Analysis settings; entries here override the series metadata.
        resultsFilename : str, optional
            Destination for results. Generated from `seriesName` if None.
        startAt : int, optional
            Frame number to start analysis at (passed through to `_setup`).
        dataSourceModule : str, optional
            Name of a module under `PYME.IO.DataSources` to load the data
            with; sniffed from the filename if None.
        serverfilter : str, optional
            Cluster server filter (passed through to `_setup`).
        **kwargs
            Forwarded to `Rule.__init__`.
        """
        from PYME.IO import MetaDataHandler
        from PYME.Analysis import MetaData
        from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
        from PYME.IO import unifiedIO

        unifiedIO.assert_uri_ok(seriesName)

        if resultsFilename is None:
            resultsFilename = genClusterResultFileName(seriesName)
        resultsFilename = verify_cluster_results_filename(resultsFilename)
        logger.info('Results file: ' + resultsFilename)

        # Series metadata first, then analysis metadata - later updates win,
        # so analysis settings override the series values.
        mdh = MetaDataHandler.NestedClassMDHandler()
        series_md = json.loads(unifiedIO.read(seriesName + '/metadata.json'))
        mdh.update(series_md)
        mdh.update(analysisMetadata)

        start_default = mdh.getOrDefault('Analysis.StartAt', 0)
        mdh['EstimatedLaserOnFrameNo'] = mdh.getOrDefault(
            'EstimatedLaserOnFrameNo', start_default)
        MetaData.fixEMGain(mdh)

        self._setup(seriesName, mdh, resultsFilename, startAt, serverfilter)

        # Resolve the data source class - sniff from the filename unless a
        # module was named explicitly.
        if dataSourceModule is None:
            ds_cls = DataSources.getDataSourceForFilename(seriesName)
        else:
            ds_mod = __import__('PYME.IO.DataSources.' + dataSourceModule,
                                fromlist=['PYME', 'io', 'DataSources'])
            ds_cls = ds_mod.DataSource

        self.ds = ds_cls(seriesName)
        logger.debug('DataSource.__class__: %s' % self.ds.__class__)

        Rule.__init__(self, **kwargs)
Example #3
0
def launch_localize(analysisMDH, seriesName):
    """
    Pushes an analysis task for a given series to the distributor

    Parameters
    ----------
    analysisMDH : dictionary-like
        MetaDataHandler describing the analysis tasks to launch
    seriesName : str
        cluster path, e.g. pyme-cluster:///example_folder/series

    Returns
    -------
    None
        The pusher is created for its side effect; the pushing thread holds
        the only reference to it.
    """
    import json
    from PYME.IO import MetaDataHandler
    from PYME.Analysis import MetaData
    from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
    from PYME.IO import unifiedIO

    unifiedIO.assert_uri_ok(seriesName)

    resultsFilename = verify_cluster_results_filename(
        genClusterResultFileName(seriesName))
    logger.info('Results file: ' + resultsFilename)

    resultsMdh = MetaDataHandler.NestedClassMDHandler()
    # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
    resultsMdh.update(json.loads(unifiedIO.read(seriesName +
                                                '/metadata.json')))
    resultsMdh.update(analysisMDH)

    # default the laser-on frame to 'Analysis.StartAt' (itself defaulting to 0)
    resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault(
        'EstimatedLaserOnFrameNo',
        resultsMdh.getOrDefault('Analysis.StartAt', 0))
    MetaData.fixEMGain(resultsMdh)

    # TODO - do we need to keep track of the pushers in some way (we currently rely on the fact that the pushing thread
    # will hold a reference
    pusher = HTTPRulePusher(dataSourceID=seriesName,
                            metadata=resultsMdh,
                            resultsFilename=resultsFilename)

    # use the module-level logger (the bare `logging` module was not imported here)
    logger.debug('Queue created')