class PhotoCalStageParallel(harnessStage.ParallelProcessing):
    """Calculate the magnitude zero point for a SourceSet for an image
    that has been matched to a corresponding SourceSet for a catalogue
    """

    def setup(self):
        policyFile = pexPolicy.DefaultPolicyFile(
            "meas_pipeline",                 # package name
            "PhotoCalStageDictionary.paf",   # default policy
            "policy"                         # dir containing policies
        )
        defaultPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = defaultPolicy
        else:
            self.policy.mergeDefaults(defaultPolicy)

        # Set up the log
        self.log = Debug(self.log, "PhotoCalStageParallel")
        self.log.setThreshold(Log.DEBUG)
        self.log.log(Log.INFO, "Finished setup of PhotoCalStageParallel")

    def process(self, clipboard):
        self.log.log(Log.INFO, "Determining Photometric Zero Point")

        # Check inputs
        if clipboard is None:
            raise RuntimeError("Clipboard is empty")

        expKey = self.policy.get('inputExposureKey')
        if not clipboard.contains(expKey):
            raise RuntimeError("No exposure on clipboard")
        exp = clipboard.get(expKey)

        srcMatchSetKey = self.policy.get("sourceMatchSetKey")
        if not clipboard.contains(srcMatchSetKey):
            raise RuntimeError("No input SourceMatch set on clipboard")
        srcMatchSet = clipboard.get(srcMatchSetKey)

        # Do the work
        try:
            magObj = photocal.calcPhotoCal(srcMatchSet, log=self.log)
        except ValueError as e:
            msg = "Failed to calculate photometric zeropoint: %s" % (e,)
            self.log.log(Log.FATAL, msg)
            magObj = None

        if magObj is not None:
            exp.getCalib().setFluxMag0(magObj.getFlux(0))
            self.log.log(Log.INFO,
                         "Flux of magnitude 0: %g" % (magObj.getFlux(0),))

        # Save results to clipboard
        outputValueKey = self.policy.get("outputValueKey")
        clipboard.put(outputValueKey, magObj)
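
# A small illustrative sketch (not pipeline code): once fluxMag0 has been set
# on the exposure's Calib, an instrumental flux can be converted to a
# calibrated magnitude with the usual relation mag = -2.5*log10(flux/fluxMag0).
# The function name and the example values below are illustrative only.
import math

def fluxToMag(flux, fluxMag0):
    """Convert an instrumental flux to a magnitude, given the flux of a
    zeroth-magnitude source."""
    return -2.5 * math.log10(flux / fluxMag0)

# A source delivering 1e4 counts when fluxMag0 is 1e9 comes out at mag 12.5:
# fluxToMag(1.0e4, 1.0e9) == 12.5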
class WcsVerificationParallel(harnessStage.ParallelProcessing):
    """Compute some statistics that indicate whether we did a good job
    computing the Wcs for an image.
    """

    def setup(self):
        policyFile = pexPolicy.DefaultPolicyFile(
            "meas_pipeline",                        # package name
            "WcsVerificationStageDictionary.paf",   # default policy
            "policy"                                # dir containing policies
        )
        defaultPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = defaultPolicy
        else:
            self.policy.mergeDefaults(defaultPolicy)

        # Set up the log
        self.log = Debug(self.log, "WcsVerificationStageParallel")
        self.log.setThreshold(Log.DEBUG)
        self.log.log(Log.INFO,
                     "Finished setup of WcsVerificationStageParallel")

    def process(self, clipboard):
        self.log.log(Log.INFO, "Calculating statistics on wcs solution")

        # Get input
        if clipboard is None:
            raise RuntimeError("Clipboard is empty")

        srcMatchSetKey = self.policy.get("sourceMatchSetKey")
        if not clipboard.contains(srcMatchSetKey):
            raise RuntimeError("No input SourceMatch set on clipboard")
        srcMatchSet = clipboard.get(srcMatchSetKey)

        inputExposureKey = self.policy.get("inputExposureKey")
        if clipboard.contains(inputExposureKey):
            exposure = clipboard.get(inputExposureKey)
        else:
            exposure = None

        # Do the work
        outputDict = {}
        outputDict.update(sip.sourceMatchStatistics(srcMatchSet))
        outputDict.update(
            verifyWcs.checkMatches(srcMatchSet, exposure, self.log))

        self.log.log(
            self.log.DEBUG,
            "cells nobj min = %(minObjectsPerCell)s "
            "max = %(maxObjectsPerCell)s "
            "mean = %(meanObjectsPerCell)s "
            "std = %(stdObjectsPerCell)s" % outputDict)

        #
        # Set the metadata (the exposure may legitimately be absent from the
        # clipboard, so guard against None)
        #
        if exposure is not None:
            for k, v in outputDict.items():
                exposure.getMetadata().set(k, v)
def postprocess(self, clipboard):
    """
    Execute the needed postprocessing code for this Stage
    """
    log = Debug(self.log, "postprocess")
    log.debug(3, "stageId=%i, universeSize=%i"
              % (self.stageId, self.universeSize))

    if (self.runmode == 'postprocess'):
        self._publish(clipboard)
        log.debug(3, "events published (stageId=%i)" % self.stageId)

    log.debug(3, "event processing done (stageId=%i)" % self.stageId)
def setup(self): """configure this stage with a policy""" file = pexPolicy.DefaultPolicyFile( "pex_harness", # package name "AreaStagePolicy_dict.paf", # def. policy "examples/simpleStageTest" # dir containing policies ) defpol = pexPolicy.Policy.createPolicy(file, file.getRepositoryPath()) if self.policy is None: self.policy = defpol else: self.policy.mergeDefaults(defpol) self.inputScale = self.policy.get("inputScale") self.outputScale = self.policy.get("outputScale") self.log = Debug(self.log, "AreaStage") if self.outputScale != 0: self.log.log(Log.INFO, "Area scaling factor: %i" % self.outputScale) self.log.debug(3, "Running with sliceID = %s" % self.getRank())
def setup(self): policyFile=pexPolicy.DefaultPolicyFile("meas_pipeline", # package name "PhotoCalStageDictionary.paf", # default. policy "policy" # dir containing policies ) defaultPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath()) if self.policy is None: self.policy = defaultPolicy else: self.policy.mergeDefaults(defaultPolicy) #Setup the log self.log = Debug(self.log, "PhotoCalStageParallel") self.log.setThreshold(Log.DEBUG) self.log.log(Log.INFO, "Finished setup of PhotoCalStageParallel")
def setup(self): """configure this stage with a policy""" file = pexPolicy.DefaultPolicyFile("pex_harness", # package name "AreaStagePolicy_dict.paf", # def. policy "examples/simpleStageTest" # dir containing policies ) defpol = pexPolicy.Policy.createPolicy(file, file.getRepositoryPath()) if self.policy is None: self.policy = defpol else: self.policy.mergeDefaults(defpol) self.inputScale = self.policy.get("inputScale") self.outputScale = self.policy.get("outputScale") self.log = Debug(self.log, "AreaStage") if self.outputScale != 0: self.log.log(Log.INFO, "Area scaling factor: %i"% self.outputScale) self.log.debug(3, "Running with sliceID = %s" % self.getRank())
def setup(self): """configure this stage with a policy""" # You should create a default policy file that is installed # with your Stage implmenetation's package and merge it with # that policy that wass handed to you by the framework (when # this instance was constructed). # # Here's how you do it. Note that the default policy file can # be a dictionary. Here, we indicated "examples" as the so-called # default policy repository for this package; however normally, # this is "pipeline". file = pexPolicy.DefaultPolicyFile("pex_harness", # package name "AreaStagePolicy_dict.paf", # def. policy "examples/simpleStageTest" # dir containing policies ) defpol = pexPolicy.Policy.createPolicy(file, file.getRepositoryPath()) if self.policy is None: self.policy = defpol else: self.policy.mergeDefaults(defpol) # now we can configure our pipeline from the policy (which should # now be complete). An exception will be thrown if the merged # policy is incomplete. self.inputScale = self.policy.get("inputScale") self.outputScale = self.policy.get("outputScale") # It is important to use the internal logger provided by the # framework at construction time; this is available as self.log. # You can feel free to create child logs from it in the usual # way: # self.log = Log(self.log, "AreaStage") # # Use this for additional debugging API sugar: self.log = Debug(self.log, "AreaStage") if self.outputScale != 0: self.log.log(Log.INFO, "Area scaling factor: %i"% self.outputScale) self.log.debug(3, "Running with sliceID = %s" % self.getRank())
class AreaStageParallel(harnessStage.ParallelProcessing):

    def setup(self):
        """configure this stage with a policy"""
        file = pexPolicy.DefaultPolicyFile(
            "pex_harness",                  # package name
            "AreaStagePolicy_dict.paf",     # def. policy
            "examples/simpleStageTest"      # dir containing policies
        )
        defpol = pexPolicy.Policy.createPolicy(file, file.getRepositoryPath())
        if self.policy is None:
            self.policy = defpol
        else:
            self.policy.mergeDefaults(defpol)

        self.inputScale = self.policy.get("inputScale")
        self.outputScale = self.policy.get("outputScale")

        self.log = Debug(self.log, "AreaStage")
        if self.outputScale != 0:
            self.log.log(Log.INFO,
                         "Area scaling factor: %i" % self.outputScale)
        self.log.debug(3, "Running with sliceID = %s" % self.getRank())

    def process(self, clipboard):
        # if clipboard is not None:

        # do our work
        area = clipboard.get("width") * clipboard.get("height") * \
               (10.0**self.inputScale / 10.0**self.outputScale)**2

        # maybe you want to write a debug message
        self.log.debug(3, "found area of %f" % area)

        # save the results to the clipboard
        clipboard.put("area", area)
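
# A standalone, illustrative sketch (not part of the harness) of the scaling
# applied in AreaStageParallel.process(): the factor
# (10**inputScale / 10**outputScale)**2 converts an area from the input
# length units to the output length units.  FakeClipboard and the numbers
# below are assumptions made purely for illustration.
class FakeClipboard(object):
    """A dictionary wrapper with the tiny get/put interface process() uses."""

    def __init__(self, **values):
        self._data = dict(values)

    def get(self, key):
        return self._data[key]

    def put(self, key, value):
        self._data[key] = value

def demoAreaScaling():
    cb = FakeClipboard(width=3.0, height=2.0)
    inputScale, outputScale = 0, 2    # e.g. metres in, hectometres out
    area = cb.get("width") * cb.get("height") * \
           (10.0**inputScale / 10.0**outputScale)**2
    cb.put("area", area)
    return cb.get("area")             # 6.0 * 1e-4 = 0.0006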
class WcsDeterminationStageParallel(harnessStage.ParallelProcessing):
    """Validate the Wcs for an image using the astrometry.net package and
    calculate distortion coefficients

    Given an initial Wcs and a list of sources (with pixel positions for
    each) in an image, pass these to the astrometry_net package to verify
    the result. Then calculate the distortion in the image and add that to
    the Wcs as SIP polynomials

    Clipboard Input:
    - an Exposure containing a Wcs
    - a SourceSet

    Clipboard Output
    - A wcs
    - A vector of SourceMatch objects
    """

    def setup(self):
        # NOTE: I don't have the default policy in the correct place yet
        policyFile = pexPolicy.DefaultPolicyFile(
            "meas_pipeline",                          # package name
            "WcsDeterminationStageDictionary.paf",    # default policy
            "policy"                                  # dir containing policies
        )
        defaultPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        # The stage can be called with an optional local policy file, which
        # overrides the defaults; merge the defaults beneath it
        if self.policy is None:
            self.policy = defaultPolicy
        else:
            self.policy.mergeDefaults(defaultPolicy)

        # Set up the astrometry solver
        path = os.path.join(os.environ['ASTROMETRY_NET_DATA_DIR'],
                            "metadata.paf")
        self.solver = astromNet.GlobalAstrometrySolution(path)
        # self.solver.allowDistortion(self.policy.get('allowDistortion'))
        self.solver.setMatchThreshold(self.policy.get('matchThreshold'))

        # Set up the log
        self.log = Debug(self.log, "WcsDeterminationStageParallel")
        self.log.setThreshold(Log.DEBUG)
        self.log.log(Log.INFO,
                     "Finished setup of WcsDeterminationStageParallel")

    def process(self, clipboard):
        self.log.log(Log.INFO, "Determining Wcs")

        # Check inputs
        if clipboard is None:
            raise RuntimeError("Clipboard is empty")

        expKey = self.policy.get('inputExposureKey')
        if not clipboard.contains(expKey):
            raise RuntimeError("No exposure on clipboard")
        exp = clipboard.get(expKey)

        srcSetKey = self.policy.get('inputSourceSetKey')
        if not clipboard.contains(srcSetKey):
            raise RuntimeError("No wcsSourceSet on clipboard")
        srcSet = clipboard.get(srcSetKey)

        # Determine list of matching sources, and WCS
        astrom = measAstrom.determineWcs(self.policy, exp, srcSet,
                                         solver=self.solver, log=self.log)
        matchList = astrom.getMatches()
        if not matchList:
            matchList = []
        wcs = astrom.getWcs()
        matchListMeta = astrom.getMatchMetadata()

        # Save results to clipboard
        smv = afwDet.SourceMatchVector()
        for m in matchList:
            smv.push_back(m)
        psmv = afwDet.PersistableSourceMatchVector(smv, matchListMeta)

        clipboard.put(self.policy.get('outputMatchListKey'), matchList)
        clipboard.put(self.policy.get('outputMatchListMetaKey'),
                      matchListMeta)
        clipboard.put(self.policy.get('outputMatchListKey') + '_persistable',
                      psmv)
        clipboard.put(self.policy.get('outputWcsKey'), wcs)
class AreaStageSerial(harnessStage.SerialProcessing):
    """
    a simple Stage implementation that calculates the area of a rectangle
    given the length of the sides
    """

    def setup(self):
        """configure this stage with a policy"""

        # You should create a default policy file that is installed
        # with your Stage implementation's package and merge it with
        # the policy that was handed to you by the framework (when
        # this instance was constructed).
        #
        # Here's how you do it.  Note that the default policy file can
        # be a dictionary.  Here, we indicate "examples" as the so-called
        # default policy repository for this package; however normally,
        # this is "pipeline".
        file = pexPolicy.DefaultPolicyFile(
            "pex_harness",                  # package name
            "AreaStagePolicy_dict.paf",     # def. policy
            "examples/simpleStageTest"      # dir containing policies
        )
        defpol = pexPolicy.Policy.createPolicy(file, file.getRepositoryPath())

        if self.policy is None:
            self.policy = defpol
        else:
            self.policy.mergeDefaults(defpol)

        # now we can configure our pipeline from the policy (which should
        # now be complete).  An exception will be thrown if the merged
        # policy is incomplete.
        self.inputScale = self.policy.get("inputScale")
        self.outputScale = self.policy.get("outputScale")

        # It is important to use the internal logger provided by the
        # framework at construction time; this is available as self.log.
        # You can feel free to create child logs from it in the usual
        # way:
        #   self.log = Log(self.log, "AreaStage")
        #
        # Use this for additional debugging API sugar:
        self.log = Debug(self.log, "AreaStage")

        if self.outputScale != 0:
            self.log.log(Log.INFO,
                         "Area scaling factor: %i" % self.outputScale)
        self.log.debug(3, "Running with sliceID = %s" % self.getRank())

    # preprocess() and postprocess() provide the serial processing; the
    # former gets executed only on the master node prior to process, and
    # the latter, afterward.  We provide a pre- and postprocess() here
    # mainly as an example; our excuse is to check that the clipboard has
    # the inputs we need.  A standalone sketch of the policy-merging
    # pattern used in setup() follows this class.
    def preprocess(self, clipboard):
        # do our work
        if clipboard is not None:
            if not clipboard.contains("width"):
                raise RuntimeError("Missing width on clipboard")
            if not clipboard.contains("height"):
                raise RuntimeError("Missing height on clipboard")

            # if you are worried about the overhead of formatting a debug
            # message, you can wrap it in an if block
            if self.log.sends(Log.DEBUG):
                # this attaches properties to our message
                LogRec(self.log, Log.DEBUG) << "all input data found." \
                    << Prop("width", clipboard.get("width")) \
                    << Prop("height", clipboard.get("height")) \
                    << LogRec.endr

    def postprocess(self, clipboard):
        # We didn't need to provide this; this is identical to the
        # inherited implementation
        pass
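
# A hedged, standalone sketch of the policy-merging pattern used in setup()
# above: load the installed default policy and merge it beneath any
# user-supplied overrides.  The module path lsst.pex.policy and the helper
# name loadAreaPolicy are assumptions for illustration; the file and
# repository names are the ones used in the example class.
import lsst.pex.policy as pexPolicy

def loadAreaPolicy(overrides=None):
    defFile = pexPolicy.DefaultPolicyFile("pex_harness",
                                          "AreaStagePolicy_dict.paf",
                                          "examples/simpleStageTest")
    defaults = pexPolicy.Policy.createPolicy(defFile,
                                             defFile.getRepositoryPath())
    if overrides is None:
        return defaults
    overrides.mergeDefaults(defaults)   # keep user values, fill in the rest
    return overrides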
class SimpleStageTester(object):
    """
    A class for running simple tests of a Stage.  Currently, this will run
    only one Slice at a time, either a master slice (running the serial
    preprocess() and postprocess() functions) or a worker slice (running
    the process() function).  The user is responsible for creating the
    data that goes into the input clipboard.
    """

    def __init__(self, stage=None, name="1", runID="simpleTest",
                 universeSize=1):
        """create the tester
        @param stage         a Stage instance for the first stage in a
                               simple pipeline.
        @param name          the name to associate with this stage (for
                               display purposes).
        @param runID         the run identifier to provide to the stage
        @param universeSize  the number of parallel threads to pretend are
                               running
        """
        self.stage = stage
        self.log = Debug("SimpleStageTester")
        self.event = None
        self.inQ = Queue()
        self.outQ = Queue()
        self.brokerhost = None
        self.brokerport = None
        self.sysdata = {}
        self.sysdata["runId"] = runID
        self.sysdata["stageId"] = -1
        self.sysdata["universeSize"] = universeSize
        self.stages = []
        if stage:
            self.stages.append((name, stage))

    def setEventBroker(self, host, port=None):
        """
        set the event broker to use to get and receive events.  Pass in
        None for the host to disable receiving events.
        @param host   the host where the event broker is running
        @param port   the port that the event broker is listening to
        """
        self.brokerhost = host
        if port:
            self.brokerport = port
        for name, stage in self.stages:
            stage.setEventBroker(self.brokerhost)

    def getEventBroker(self):
        return (self.brokerhost, self.brokerport)

    def addStage(self, stage, name=None, event=None):
        """
        add a stage to this simple pipeline.
        @param stage   the Stage instance to add in order
        @param name    a name for the stage.  If None, create one from the
                         stage object
        @param event   an event which must be received prior to execution
                         NOT YET IMPLEMENTED (ignored).
        """
        if name is None:
            name = str(len(self.stages))
        stage.setEventBroker(self.brokerhost)
        self.stages.append((name, stage))

    def run(self, clipboard, sliceID):
        """run the stage as a particular slice, returning the output
        clipboard
        @param clipboard   a Clipboard or plain dictionary instance
                             containing the Stage input data
        @param sliceID     the number to give as the slice identifier.  A
                             negative id number indicates the master slice.
""" if (sliceID < 0): return self.runMaster(clipboard, sliceID=sliceID) else: return self.runWorker(clipboard, sliceID) def _makeStages(self, sliceID, isSerial): sysdata = self.sysdata.copy() stages = [] inQ = self.inQ outQ = None for name, stage in self.stages: sysdata['stageId'] = len(stages) + 1 sysdata['name'] = name if (isSerial): newstage = stage.createSerialProcessing(self.log, sysdata) else: newstage = stage.createParallelProcessing(sliceID, self.log, sysdata) stages.append( newstage ) if len(stages) == len(self.stages): outQ = self.outQ else: outQ = Queue() stages[-1].initialize(outQ, inQ) inQ = outQ return stages def runMaster(self, clipboard, sliceID=0): """run the Stage as a master, calling its initialize() function, and then running the serial functions, preprocess() and postprocess() """ if isinstance(clipboard, dict): clipboard = self.toClipboard(clipboard) if not isinstance(clipboard, Clipboard): raise TypeError("runMaster input is not a clipboard") if self.event is not None: clipboard(self.event[0], self.event[1]) stages = self._makeStages(sliceID, True) self.inQ.addDataset(clipboard) for stage in stages: self.log.debug(5, "Calling Stage preprocess() on " + stage.getName()) interQ = stage.applyPreprocess() self.log.debug(5, "Stage preprocess() complete") self.log.debug(5, "Calling Stage postprocess()") stage.applyPostprocess(interQ) self.log.debug(5, "Stage postprocess() complete") return self.outQ.getNextDataset() def runWorker(self, clipboard, sliceID=1): """run the Stage as a worker, running its process() function @param clipboard a Clipboard or plain dictionary instance containing the Stage input data @param sliceID the number to give as the slice identifier; must be >= 0. """ if isinstance(clipboard, dict): clipboard = self.toClipboard(clipboard) if not isinstance(clipboard, Clipboard): raise TypeError("runMaster input is not a clipboard") if self.event is not None: clipboard(self.event[0], self.event[1]) stages = self._makeStages(sliceID, False) self.inQ.addDataset(clipboard) for stage in stages: self.log.debug(5, "Calling Stage process()") stage.applyProcess() self.log.debug(5, "Stage process() complete") return self.outQ.getNextDataset() def setEvent(self, name, eventData): """set data for an event to be received by the Stage prior to being called. This implementation currently does not support array-valued data. @param name the topic name for the event @param eventData the event data, either as a python dictionary or a PropertySet """ if isinstance(eventData, dict): evprop = dafBase.PropertySet() for key in eventData.keys(): evprop.set(key, eventData[key]) eventData = evprop self.event = (name, eventData) def toClipboard(self, data): cb = Clipboard() for key in data.keys(): cb.put(key, data[key]) return cb def setDebugVerbosity(self, verbLimit): """set the verbosity of the default log. This and setLogThreshold() are different APIs that affect the same underlying limit that controls how many messages get logged. @param verbLimit debug messages with a verbosity level larger than this will not be printed. If positive INFO, WARN, and FATAL messages will also be printed. """ Log.getDefaultLog().setThreshold(-1*verbLimit) def setLogThreshold(self, threshold): """set the importance threshold for the default log. This and setDebugVerbosity are different APIs that affect the same underlying limit that controls how many messages get logged. Normally one uses one of the predefined values--Log.DEBUG, Log.INFO, Log.WARN, and Log.FATAL--as input. 
        @param threshold   the minimum importance of the message that
                             will get printed.
        """
        Log.getDefaultLog().setThreshold(threshold)

    def showAllLogProperties(self, show):
        """control whether log properties are displayed to the screen.
        These include, for example, the DATE (and time) of the message.
        @param show   if true, show all the properties when a log message
                        is printed.  If false, don't show them.
        """
        Log.getDefaultLog().setShowAll(show)

    def willShowAllLogProperties(self):
        """return whether log properties are displayed to the screen.
        These include, for example, the DATE (and time) of the message.
        """
        return Log.getDefaultLog().willShowAll()
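
# A hedged sketch of exercising a stage with SimpleStageTester.  The `stage`
# argument is assumed to be a harnessStage.Stage wrapper (for example, one
# binding AreaStageSerial/AreaStageParallel); constructing such a wrapper is
# not shown in this excerpt, so this illustrative helper takes it as a
# parameter.
def testOneWorker(stage, width=1.0, height=2.0):
    """Run a single worker slice of `stage` on a two-key clipboard and
    return the output clipboard."""
    tester = SimpleStageTester(stage, name="test")
    tester.setDebugVerbosity(5)
    # plain dictionaries are converted to Clipboards by runWorker()
    return tester.runWorker({"width": width, "height": height})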
def _publish(self, clipboard, log=None):
    """
    Publish events if required
    """
    if log is None:
        log = self.log
    log = Debug(log, "publish")

    log.debug(4, "Looking for keysToPublish")
    if not self.policy.exists('keysToPublish'):
        log.log(Log.WARN,
                "Did not find keysToPublish in EventStage Policy")
        return
    log.debug(4, "Found keysToPublish")

    publKeyList = self.policy.getArray("keysToPublish")
    if len(publKeyList) <= 0:
        log.log(Log.WARN, "Empty keysToPublish in EventStage Policy")
        return
    log.debug(4, "Got array: " + str(publKeyList))

    for key in publKeyList:
        log.debug(4, "Got key %s" % key)
        (eventName, propertySetName) = self._parseKeysToPublish(key)
        log.debug(4, "eventName=%s, propertySetName=%s"
                  % (eventName, propertySetName))

        psPtr = clipboard.get(propertySetName)
        log.debug(4, "Got propertySet %s" % psPtr.toString(False))

        oneEventTransmitter = \
            events.EventTransmitter(self.getEventBrokerHost(), eventName)
        oneEventTransmitter.publish(psPtr)

        log.debug(4, 'Published event %s' % key)
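
# A hedged guess at the helper used above; the real _parseKeysToPublish is
# not shown in this excerpt.  Judging only from how its result is unpacked,
# each keysToPublish entry is assumed to be either "eventName=propertySetName"
# or a bare name used for both.  Treat this as an illustration, not the
# actual method.
def _parseKeysToPublish(self, key):
    if '=' in key:
        eventName, propertySetName = key.split('=', 1)
    else:
        eventName = propertySetName = key
    return (eventName, propertySetName)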