def __init__(self, seriesName, analysisMetadata, resultsFilename=None, startAt=0, dataSourceModule=None, serverfilter=clusterIO.local_serverfilter, **kwargs):
    """Build an analysis rule for a single acquired series.

    Fetches the series metadata from the cluster, overlays the supplied
    analysis metadata on top of it, resolves the results filename, opens a
    data source for the series, and finally delegates to ``Rule.__init__``.

    Parameters
    ----------
    seriesName : str
        cluster URI of the series to analyse
    analysisMetadata : dictionary-like
        MetaDataHandler describing the analysis to perform; entries here
        override the corresponding entries in the series metadata
    resultsFilename : str, optional
        where to store results; derived from ``seriesName`` when omitted
    startAt : int
        first frame to process
    dataSourceModule : str, optional
        name of a module under ``PYME.IO.DataSources`` to use for reading
        the data; auto-detected from the filename when omitted
    serverfilter : str
        cluster name filter used when locating data
    """
    from PYME.IO import MetaDataHandler
    from PYME.Analysis import MetaData
    from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
    from PYME.IO import unifiedIO

    unifiedIO.assert_uri_ok(seriesName)

    if resultsFilename is None:
        resultsFilename = genClusterResultFileName(seriesName)
    resultsFilename = verify_cluster_results_filename(resultsFilename)
    logger.info('Results file: ' + resultsFilename)

    # series metadata first, then the analysis metadata on top - anything
    # supplied in the analysis MDH wins over the series values
    mdh = MetaDataHandler.NestedClassMDHandler()
    mdh.update(json.loads(unifiedIO.read(seriesName + '/metadata.json')))
    mdh.update(analysisMetadata)
    mdh['EstimatedLaserOnFrameNo'] = mdh.getOrDefault('EstimatedLaserOnFrameNo', mdh.getOrDefault('Analysis.StartAt', 0))
    MetaData.fixEMGain(mdh)

    self._setup(seriesName, mdh, resultsFilename, startAt, serverfilter)

    # resolve the data source class - either auto-detected from the series
    # name, or an explicitly requested module under PYME.IO.DataSources
    if dataSourceModule is None:
        DataSourceClass = DataSources.getDataSourceForFilename(seriesName)
    else:
        DataSourceClass = __import__('PYME.IO.DataSources.' + dataSourceModule, fromlist=['PYME', 'io', 'DataSources']).DataSource #import our data source

    self.ds = DataSourceClass(seriesName)
    logger.debug('DataSource.__class__: %s' % self.ds.__class__)

    Rule.__init__(self, **kwargs)
def launch_localize(analysisMDH, seriesName):
    """
    Pushes an analysis task for a given series to the distributor

    Parameters
    ----------
    analysisMDH : dictionary-like
        MetaDataHandler describing the analysis tasks to launch; entries here
        override the corresponding entries in the series metadata
    seriesName : str
        cluster path, e.g. pyme-cluster:///example_folder/series

    Returns
    -------
    pusher : HTTPRulePusher
        the pusher that queued the analysis. The pushing thread holds its own
        reference for the lifetime of the task, but returning it lets callers
        keep track of the pushers they have launched.
    """
    import json
    from PYME.IO import MetaDataHandler
    from PYME.Analysis import MetaData
    from PYME.IO.FileUtils.nameUtils import genClusterResultFileName
    from PYME.IO import unifiedIO

    unifiedIO.assert_uri_ok(seriesName)

    resultsFilename = verify_cluster_results_filename(genClusterResultFileName(seriesName))
    logger.info('Results file: ' + resultsFilename)

    resultsMdh = MetaDataHandler.NestedClassMDHandler()
    # NB - anything passed in analysis MDH will wipe out corresponding entries in the series metadata
    resultsMdh.update(json.loads(unifiedIO.read(seriesName + '/metadata.json')))
    resultsMdh.update(analysisMDH)
    resultsMdh['EstimatedLaserOnFrameNo'] = resultsMdh.getOrDefault('EstimatedLaserOnFrameNo', resultsMdh.getOrDefault('Analysis.StartAt', 0))
    MetaData.fixEMGain(resultsMdh)

    pusher = HTTPRulePusher(dataSourceID=seriesName, metadata=resultsMdh, resultsFilename=resultsFilename)
    # use the module-level logger (the original called logging.debug while the
    # local `import logging` was commented out)
    logger.debug('Queue created')
    return pusher