Example #1
class FakeOutput(harnessStage.ParallelProcessing):
    """
    This stage simulates writing out the expected output datasets.
    """

    def setup(self):
        if not self.log:
            self.log = Log.getDefaultLog()
        self.mylog = Log(self.log, "output")
        self.outputDatasetsKey = \
                    self.policy.getString("inputKeys.outputDatasets")
        self.possibleDatasetsKey = \
                    self.policy.getString("inputKeys.possibleDatasets")

    def process(self, clipboard):
        expected = clipboard.get(self.possibleDatasetsKey)
        outputds = clipboard.get(self.outputDatasetsKey)

        # this implementation will pretend to write out all of the
        # expected datasets.  It will also put each dataset written
        # out into the outputDatasets list.
        if expected:
            for ds in expected:
                self.mylog.log(Log.INFO, "Writing out " + ds.toString())
                outputds.append(ds)
        else:
            self.log.log(Log.WARN, "No expected datasets on clipboard")
            

        clipboard.put(self.outputDatasetsKey, outputds)
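
The stage above moves data exclusively through a clipboard object via get and put. The real container comes from pex_harness; the following dict-backed stand-in is only a sketch of the semantics these examples rely on (SimpleClipboard and its shared-flag handling are hypothetical, not LSST classes).

class SimpleClipboard(object):
    """Minimal, hypothetical stand-in for the harness Clipboard."""

    def __init__(self):
        self._data = {}
        self._shared = set()

    def get(self, key, default=None):
        # return the value stored under key, or default if the key is absent
        return self._data.get(key, default)

    def put(self, key, value):
        # store (or overwrite) the value under key
        self._data[key] = value

    def setShared(self, key, shared=True):
        # record whether the key should be shared between Pipeline and Slices
        if shared:
            self._shared.add(key)
        else:
            self._shared.discard(key)


# usage: the same read-append-write cycle FakeOutput.process performs
cb = SimpleClipboard()
cb.put("possibleDatasets", ["ds1", "ds2"])
cb.put("outputDatasets", [])
for ds in cb.get("possibleDatasets"):
    cb.get("outputDatasets").append(ds)
cb.put("outputDatasets", cb.get("outputDatasets"))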
Example #2
    def checkExitByVisit(self):
        log = Log(self.log, "checkExitByVisit")

        if((self._stop.isSet()) and (self.exitLevel == 4)):
            log.log(Log.INFO, "Pipeline stop is set at exitLevel of 4")
            log.log(Log.INFO, "Exit here at the end of the Visit")
            self.forceShutdown = 1
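
checkExitByVisit above (and its siblings checkExitBySyncPoint and checkExitByStage later on) consults a stop flag and an exit level to decide when to shut down: 2 at a synchronization point, 3 at the end of a Stage, 4 at the end of a Visit. A minimal sketch of that pattern, assuming _stop is a plain threading.Event and exitLevel an integer chosen at configuration time (both assumptions about the harness internals):

import threading

class ExitCheckDemo(object):
    """Toy illustration of the stop-flag pattern used by the checkExit* hooks."""

    def __init__(self, exitLevel):
        self._stop = threading.Event()  # set asynchronously when a stop is requested
        self.exitLevel = exitLevel      # 2: sync point, 3: end of Stage, 4: end of Visit
        self.forceShutdown = 0

    def requestStop(self):
        self._stop.set()

    def checkExitByVisit(self):
        # shut down only if a stop was requested and we exit at visit boundaries
        if self._stop.is_set() and self.exitLevel == 4:
            self.forceShutdown = 1


demo = ExitCheckDemo(exitLevel=4)
demo.requestStop()
demo.checkExitByVisit()
assert demo.forceShutdown == 1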
Example #3
    def _link(self, clipboard):
        """
        Link one or more sourcePaths (from policy) to destPaths after
        formatting each with additionalData derived from the clipboard and
        stage information.
        """
        if not self.policy.exists('Links'):
            mylog = Log(Log.defaultLog(),
                        "pex.harness.SymLinkStage.SymLinkStageSerial")
            mylog.log(Log.WARN, "No Links found")
            return

        additionalData = lsst.pex.harness.Utils.createAdditionalData(self, \
                    self.policy, clipboard)

        linkPolicyList = self.policy.getPolicyArray('Links')
        for linkPolicy in linkPolicyList:
            sourcePath = lsst.daf.persistence.LogicalLocation(
                linkPolicy.getString('sourcePath'),
                additionalData).locString()
            destPath = lsst.daf.persistence.LogicalLocation(
                linkPolicy.getString('destPath'), additionalData).locString()
            lsst.pex.logging.Trace("pex.harness.SymLinkStage.SymLinkStageSerial", 3, \
                    "linking %s to %s" % (sourcePath, destPath))
            parentDir = os.path.dirname(destPath)
            if parentDir and not os.path.exists(parentDir):
                os.makedirs(parentDir)
            try:
                os.symlink(sourcePath, destPath)
            except OSError, e:
                # ignore "file exists" but re-raise anything else
                if e.errno != 17:
                    raise e
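
The except clause above recognizes an existing link by the raw error number 17. The same guard written against the symbolic errno constant, as a standalone sketch (plain standard library, not the SymLinkStage code):

import errno
import os

def symlink_if_absent(sourcePath, destPath):
    """Create destPath -> sourcePath, tolerating a link that already exists."""
    parentDir = os.path.dirname(destPath)
    if parentDir and not os.path.isdir(parentDir):
        os.makedirs(parentDir)
    try:
        os.symlink(sourcePath, destPath)
    except OSError as e:
        # ignore "file exists" but re-raise anything else
        if e.errno != errno.EEXIST:
            raise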
Example #4
def main():
    """execute the watchLogs script"""

    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(
            run.verbosity2threshold(cl.opts.verbosity, 0))
        if cl.opts.inclhosts:
            cl.opts.inclhosts = cl.opts.inclhosts.split(',')
        if cl.opts.exclhosts:
            cl.opts.exclhosts = cl.opts.exclhosts.split(',')
        hosts = cl.opts.inclhosts
        if not hosts:
            hosts = cl.opts.exclhosts

        if cl.opts.minimport and cl.opts.maximport and \
           cl.opts.minimport > cl.opts.maximport:
            raise run.UsageError(
                "-m value (%i) should be less than -M value (%i)" %
                (cl.opts.minimport, cl.opts.maximport))

        watchLogs(cl.args[0], cl.args[1:], cl.opts.sleep,
                  cl.opts.slice, hosts, not bool(cl.opts.inclhosts),
                  cl.opts.minimport, cl.opts.maximport)

    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
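
run.verbosity2threshold above converts the -v count into a threshold for the default log; its internals are not shown in these examples. As a rough, assumed equivalent only (the real lsst.pex.harness.run helper may behave differently, for example by also accepting level names), the mapping amounts to making the threshold more permissive as verbosity grows:

def verbosity_to_threshold(verbosity, default=0):
    """Hypothetical stand-in for run.verbosity2threshold: higher verbosity
    yields a lower (more permissive) threshold, following the -10 * verbosity
    convention used by launchMospipe elsewhere in these examples."""
    if verbosity is None:
        return default
    return -10 * int(verbosity)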
Example #5
def main():
    """execute the testEventLogger script"""

    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(
            run.verbosity2threshold(cl.opts.verbosity, 0))

        props = {}
        if cl.opts.stage:
            props["stageId"] = cl.opts.stage
        if cl.opts.pipeline:
            props["pipeline"] = cl.opts.pipeline

        broker = None
        if len(cl.args) > 0:
            broker = cl.args[0]
        input = None
        if cl.opts.stdin:
            input = sys.stdin

        testEventLogger(broker, cl.opts.runid, cl.opts.slice, props, input,
                        cl.opts.logname, cl.opts.logtopic)
    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
Example #6
def main():
    "execute the launchMospipe script"

    logger = Log(Log.getDefaultLog(), "launchMos")
    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(-10 * cl.opts.verbosity)

        if cl.opts.pipeverb is None:
            cl.opts.pipeverb = "trace"

        t = filter(lambda x: x.startswith(cl.opts.datatype.lower()),
                   datatypes.keys())
        if len(t) > 1:
            raise ValueError("Ambiguous data type name: " + cl.opts.datatype)
        if len(t) == 0:
            raise ValueError("Unrecognized data type name: "+ cl.opts.datatype)
        cl.opts.datatype = datatypes[t[0]]

        colls = []
        # parse the collection names
        if cl.opts.colls is not None:
            colls = cl.opts.colls.split(',')

        launchMos(cl.args[0], cl.args[1], cl.args[2:], colls, cl.opts, logger)
        
    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
Example #7
class FakeOutput(harnessStage.ParallelProcessing):
    """
    This stage simulates writing out the expected output datasets.
    """
    def setup(self):
        if not self.log:
            self.log = Log.getDefaultLog()
        self.mylog = Log(self.log, "output")
        self.outputDatasetsKey = \
                    self.policy.getString("inputKeys.outputDatasets")
        self.possibleDatasetsKey = \
                    self.policy.getString("inputKeys.possibleDatasets")

    def process(self, clipboard):
        expected = clipboard.get(self.possibleDatasetsKey)
        outputds = clipboard.get(self.outputDatasetsKey)

        # this implementation will pretend to write out all of the
        # expected datasets.  It will also put each dataset written
        # out into the outputDatasets list.
        if expected:
            for ds in expected:
                self.mylog.log(Log.INFO, "Writing out " + ds.toString())
                outputds.append(ds)
        else:
            self.log.log(Log.WARN, "No expected datasets on clipboard")

        clipboard.put(self.outputDatasetsKey, outputds)
Example #8
 def showAllLogProperties(self, show):
     """control whether log properties are displayed to the screen.  These
     include, for example, the DATE (and time) of the message.
     @param show   if true, show all the properties when a log message is
                      printed.  If false, don't show them.
     """
     Log.getDefaultLog().setShowAll(show)
Example #9
class IsrCcdSdqaStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "CcdSdqaStage -- Parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline",
                "IsrCcdSdqaStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Calculate SDQA metrics based on the assembled ccd.")
        
        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.getString("inputKeys.ccdExposure"))
        ipIsr.calculateSdqaCcdRatings(exposure)
        #output products
        clipboard.put(self.policy.get("outputKeys.sdqaCcdExposure"),
                exposure)
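
The setup() in the stage above follows a pattern repeated by nearly every stage in these examples: locate the stage's policy dictionary, load it, and merge it into whatever policy was supplied. A small helper capturing that boilerplate could look like the sketch below; it is a refactoring suggestion assembled from the pexPolicy calls already shown, not an existing LSST utility.

import lsst.pex.policy as pexPolicy

def merge_stage_defaults(policy, package, dictionaryFile, repos="policy"):
    """Return policy with the named dictionary's defaults merged in,
    creating an empty Policy first if none was supplied."""
    policyFile = pexPolicy.DefaultPolicyFile(package, dictionaryFile, repos)
    defPolicy = pexPolicy.Policy.createPolicy(policyFile,
                                              policyFile.getRepositoryPath())
    if policy is None:
        policy = pexPolicy.Policy()
    policy.mergeDefaults(defPolicy)
    return policy

# hypothetical use inside setup():
#     self.policy = merge_stage_defaults(self.policy, "ip_pipeline",
#                                        "IsrCcdSdqaStageDictionary.paf")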
Example #10
class IsrCcdSdqaStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "CcdSdqaStage -- Parallel")

        policyFile = pexPolicy.DefaultPolicyFile(
            "ip_pipeline", "IsrCcdSdqaStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO,
                     "Calculate SDQA metrics based on the assembled ccd.")

        #grab exposure from clipboard
        exposure = clipboard.get(
            self.policy.getString("inputKeys.ccdExposure"))
        ipIsr.calculateSdqaCcdRatings(exposure)
        #output products
        clipboard.put(self.policy.get("outputKeys.sdqaCcdExposure"), exposure)
Example #11
    def checkExitBySyncPoint(self):
        log = Log(self.log, "checkExitBySyncPoint")

        if((self._stop.isSet()) and (self.exitLevel == 2)):
            log.log(Log.INFO, "Pipeline stop is set at exitLevel of 2")
            log.log(Log.INFO, "Exit here at a Synchronization point")
            self.forceShutdown = 1
Example #12
class IsrVarianceStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "IsrVarianceStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "IsrVarianceStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Calculating variance from image counts.")
        
        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.getString("inputKeys.exposure"))
        ipIsr.updateVariance(exposure)
        #output products
        clipboard.put(self.policy.get("outputKeys.varianceAddedExposure"), exposure)
Example #13
def main():
    """execute the testEventLogger script"""

    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(
            run.verbosity2threshold(cl.opts.verbosity, 0))

        props = {}
        if cl.opts.stage:
            props["stageId"] = cl.opts.stage
        if cl.opts.pipeline:
            props["pipeline"] = cl.opts.pipeline

        broker = None
        if len(cl.args) > 0:
            broker = cl.args[0]
        input = None
        if cl.opts.stdin:
            input = sys.stdin

        testEventLogger(broker, cl.opts.runid, cl.opts.slice, props, 
                        input, cl.opts.logname, cl.opts.logtopic)
    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
Example #14
 def shutdown(self):
     """
     Shutdown the Slice execution
     """
     shutlog = Log(self.log, "shutdown", Log.INFO)
     shutlog.log(Log.INFO, "Shutting down Slice")
     self.cppSlice.shutdown()
Example #15
def main():
    "execute the launchMospipe script"

    logger = Log(Log.getDefaultLog(), "launchMos")
    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(-10 * cl.opts.verbosity)

        if cl.opts.pipeverb is None:
            cl.opts.pipeverb = "trace"

        t = filter(lambda x: x.startswith(cl.opts.datatype.lower()),
                   datatypes.keys())
        if len(t) > 1:
            raise ValueError("Ambiguous data type name: " + cl.opts.datatype)
        if len(t) == 0:
            raise ValueError("Unrecognized data type name: " +
                             cl.opts.datatype)
        cl.opts.datatype = datatypes[t[0]]

        colls = []
        # parse the collection names
        if cl.opts.colls is not None:
            colls = cl.opts.colls.split(',')

        launchMos(cl.args[0], cl.args[1], cl.args[2:], colls, cl.opts, logger)

    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
Example #16
class IsrDarkStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "IsrDarkStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "IsrDarkStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Doing dark subtraction.")
        
        #grab exposure and dark from clipboard
        darkexposure = clipboard.get(self.policy.getString("inputKeys.darkexposure"))
        exposure = clipboard.get(self.policy.getString("inputKeys.exposure"))
        darkscaling = darkexposure.getCalib().getExptime()
        expscaling = exposure.getCalib().getExptime()
        ipIsr.darkCorrection(exposure, darkexposure, float(expscaling),
                float(darkscaling))

        #output products
        clipboard.put(self.policy.get("outputKeys.darkSubtractedExposure"), exposure)
Example #17
class IsrVarianceStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "IsrVarianceStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile(
            "ip_pipeline", "IsrVarianceStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Calculating variance from image counts.")

        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.getString("inputKeys.exposure"))
        ipIsr.updateVariance(exposure)
        #output products
        clipboard.put(self.policy.get("outputKeys.varianceAddedExposure"),
                      exposure)
Example #18
 def __init__(self,
              policy,
              log=None,
              stageId=-1,
              eventBroker=None,
              sysdata=None):
     """
     initialize this stage with the policy that defines the stage and
     some contextual system data.  Applications normally do not directly
     call this constructor.  Instead they either construct a Stage subclass
     or create a Stage instance using makeStage() or makeStageFromPolicy().
     
     @param policy       the policy that will configure the SerialProcessing
                            and ParallelProcessing
     @param log          the log object the stage instance should use.
                           If not provided, a default will be used.
     @param eventBroker  the name of the host where the event broker is
                           running.  If not provided, an eventBroker will
                           not be available to the stage.
     @param sysdata      a dictionary of data describing the execution
                           context.  The stage uses this information to
                           set some of its internal data.  See
                            StageProcessing documentation for details.
                            The name provided in the policy will override
                            the name in this dictionary.
     """
     if sysdata is None:
         sysdata = {}
     self.sysdata = sysdata
     self.stagePolicy = policy
     self.eventBroker = eventBroker
     if log is None:
         log = Log(Log.getDefaultLog(), "stage")
     self.log = log
Example #19
 def showAllLogProperties(self, show):
     """control whether log properties are displayed to the screen.  These
     include, for example, the DATE (and time) of the message.
     @param show   if true, show all the properties when a log message is
                      printed.  If false, don't show them.
     """
     Log.getDefaultLog().setShowAll(show)
Example #20
    def process(self, clipboard):
        """
        Execute the needed processing code for this Stage
        """
        print 'Python apps.SyncSetupStageParallel process : stageId %i' % self.stageId
        print 'Python apps.SyncSetupStageParallel process : _rank %i' % self.rank

        root = Log.getDefaultLog()
        log = Log(root,
                  "lsst.pexhexamples.apps.SyncSetupStageParallel.process")

        lr = LogRec(log, Log.INFO)
        lr << "Retrieving Clipboard"
        lr << LogRec.endr

        propertySet = dafBase.PropertySet()
        propertySet.setInt("sliceRank", self.rank)
        propertySet.setString("Level", "Debug")

        clipboard.put("rankKey", propertySet)

        clipboard.setShared("rankKey", True)

        lr = LogRec(log, Log.INFO)
        lr << "Posted data to be Shared on Clipboard"
        lr << LogRec.endr
Example #21
class IsrFringeStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "IsrFringeStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "IsrFringeStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Doing Fringe correction.")
        
        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.getString("inputKeys.exposure"))
        fringeexposure = clipboard.get(self.policy.getString("inputKeys.fringeexposure"))
        #fringeCorrection is not implemented
        self.log.log(Log.INFO, "ipIsr.fringeCorrection is not implemented -- \n\tOriginal exposure is returned")
        #ipIsr.fringeCorrection(exposure, fringeexposure)

        #output products
        clipboard.put(self.policy.get("outputKeys.fringeCorrectedExposure"), exposure)
Example #22
 def shutdown(self): 
     """
     Shutdown the Slice execution
     """
      shutlog = Log(self.log, "shutdown", Log.INFO)
     shutlog.log(Log.INFO, "Shutting down Slice")
     self.cppSlice.shutdown()
Example #23
    def checkExitByStage(self):
        log = Log(self.log, "checkExitByStage")

        if((self._stop.isSet()) and (self.exitLevel == 3)):
            log.log(Log.INFO, "Pipeline stop is set at exitLevel of 3")
            log.log(Log.INFO, "Exit here at the end of the Stage")
            self.forceShutdown = 1
Example #24
    class _StopThread(threading.Thread):

        def __init__(self, joboffice, stopTopic, runId, brokerHost, 
                     brokerPort=None, waittime=60):

            threading.Thread.__init__(self, name=joboffice.getName()+".stop")
            self.setDaemon(True)
            
            self.jo = joboffice
            self.timeout = waittime

            self.log = Log(self.jo.log, "stop")

            selector = ""
            if runId:
                selector = "RUNID='%s'" % runId
                
            if brokerPort:
                self.rcvr = EventReceiver(brokerHost, brokerPort, stopTopic,
                                          selector)
            else:
                self.rcvr = EventReceiver(brokerHost, stopTopic, selector)
                
        def run(self):
            while True:
                event = self.rcvr.receiveEvent(self.timeout)
                if event:
                    self.log.log(Log.INFO-1, "received stop event; " +
                                 "shutting down JobOffice thread")
                    self.jo.stop()
                if self.jo.halt:
                    return
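
The run() loop above polls for a stop event with a timeout and also watches the owner's halt flag so the thread can exit on its own. Stripped of the LSST event types, the shape of that loop looks like the following self-contained sketch (a standard-library Queue stands in for EventReceiver; that substitution is an assumption, not the ctrl.events API):

import threading
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2

class StopListener(threading.Thread):
    """Generic stand-in for _StopThread: wait for a 'stop' token, then
    invoke a callback; exit once the owner signals it has halted."""

    def __init__(self, events, on_stop, halted, waittime=1.0):
        threading.Thread.__init__(self, name="stopListener")
        self.setDaemon(True)
        self.events = events      # queue.Queue delivering stop tokens
        self.on_stop = on_stop    # callable invoked when a stop arrives
        self.halted = halted      # threading.Event set by the owner on shutdown
        self.timeout = waittime

    def run(self):
        while True:
            try:
                self.events.get(timeout=self.timeout)
                self.on_stop()
            except queue.Empty:
                pass
            if self.halted.is_set():
                return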
Example #25
class IsrBiasStageParallel(harnessStage.ParallelProcessing):
    """
    Description:

    Policy Dictionary:

    Clipboard Input:

    ClipboardOutput:
    """
    def setup(self):
        self.log = Log(self.log, "IsrBiasStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "IsrBiasStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)

    def process(self, clipboard):
        """
        """
        self.log.log(Log.INFO, "Doing bias subtraction.")
        
        #grab exposure and bias from clipboard
        biasexposure = clipboard.get(self.policy.getString("inputKeys.biasexposure"))
        exposure = clipboard.get(self.policy.getString("inputKeys.exposure"))
        ipIsr.biasCorrection(exposure, biasexposure)
        #output products
        clipboard.put(self.policy.get("outputKeys.biasSubtractedExposure"), exposure)
Example #26
 def setup(self):
     if not self.log:
         self.log = Log.getDefaultLog()
     self.mylog = Log(self.log, "output")
     self.outputDatasetsKey = \
                 self.policy.getString("inputKeys.outputDatasets")
     self.possibleDatasetsKey = \
                 self.policy.getString("inputKeys.possibleDatasets")
Example #27
def run():
    Log.getDefaultLog()
    memId0 = dafBase.Citizen_getNextMemId()
    main()
    # check for memory leaks
    if dafBase.Citizen_census(0, memId0) != 0:
        print dafBase.Citizen_census(0, memId0), 'Objects leaked:'
        print dafBase.Citizen_census(dafBase.cout, memId0)
Example #28
 def shutdown(self): 
     """
     Shutdown the Slice execution
     """
      shutlog = Log(self.log, "shutdown", Log.INFO)
     pid = os.getpid()
     shutlog.log(Log.INFO, "Shutting down Slice:  pid " + str(pid))
     os.kill(pid, signal.SIGKILL) 
Example #29
 def setup(self):
     if not self.log:
         self.log = Log.getDefaultLog()
     self.mylog = Log(self.log, "output")
     self.outputDatasetsKey = \
                 self.policy.getString("inputKeys.outputDatasets")
     self.possibleDatasetsKey = \
                 self.policy.getString("inputKeys.possibleDatasets")
Example #30
def run():
    Log.getDefaultLog()
    memId0 = dafBase.Citizen_getNextMemId()
    main()
    # check for memory leaks
    if dafBase.Citizen_census(0, memId0) != 0:
        print dafBase.Citizen_census(0, memId0), 'Objects leaked:'
        print dafBase.Citizen_census(dafBase.cout, memId0)
Example #31
 def shutdown(self):
     """
     Shutdown the Slice execution
     """
     shutlog = Log(self.log, "shutdown", Log.INFO)
     pid = os.getpid()
     shutlog.log(Log.INFO, "Shutting down Slice:  pid " + str(pid))
     os.kill(pid, signal.SIGKILL)
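
The variant above ends the Slice abruptly with SIGKILL, which cannot be caught or cleaned up after. Where a gentler exit is acceptable, a common alternative (a generic sketch, not part of the harness API) is to send SIGTERM and let a handler set a stop flag that the worker loop checks:

import signal
import threading

stopRequested = threading.Event()

def _handleTerm(signum, frame):
    # note the request; the worker loop below is expected to poll the flag
    stopRequested.set()

signal.signal(signal.SIGTERM, _handleTerm)

# a worker loop can then exit cleanly:
#     while not stopRequested.is_set():
#         do_one_unit_of_work()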
Example #32
class DiffImStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps image subtraction        

    Policy Dictionary:
    lsst/ip/pipeline/policy/DiffImStageDictionary.paf

    Clipboard Input:
    - Template Exposure : to be convolved
    - Science Exposure  : to be matched to

    Clipboard Output:
    - Difference Exposure : resulting difference image
    - Psf Matching Kernel : the spatial model of the Psf matching Kernel
    - Background Function : differential background model
    """
    def setup(self):
        self.log = Log(self.log, "DiffImStage - parallel")
        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline",
                                                 "DiffImStageDictionary.paf",
                                                 "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile,
            policyFile.getRepositoryPath(),  # repos
            True)  # validate

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())
        self.diffImPolicy = ipDiffim.makeDefaultPolicy()

    def process(self, clipboard):
        """
        Run image subtraction
        """
        self.log.log(Log.INFO, "Running image subtraction")

        # grab exposures from clipboard
        templateExposure = clipboard.get(
            self.policy.getString("inputKeys.templateExposureKey"))
        scienceExposure = clipboard.get(
            self.policy.getString("inputKeys.scienceExposureKey"))

        # run image subtraction
        psfMatch = ipDiffim.ImagePsfMatch(self.diffImPolicy)
        results = psfMatch.subtractExposures(templateExposure, scienceExposure)

        # parse results
        differenceExposure, spatialKernel, backgroundModel, kernelCellSet = results

        #output products
        clipboard.put(self.policy.get("outputKeys.differenceExposureKey"),
                      differenceExposure)
        clipboard.put(self.policy.get("outputKeys.psfMatchingKernelKey"),
                      spatialKernel)
        clipboard.put(self.policy.get("outputKeys.backgroundFunctionKey"),
                      backgroundModel)
Example #33
    def preprocess(self, clipboard):
        """
        Processing code for this Stage to be executed by the main Pipeline 
        prior to invoking Slice process 
        """
        log = Log(self.log,
                  "lsst.pexhexamples.pipeline.SampleStageSerial.preprocess")

        log.log(Log.INFO, 'Executing SampleStageSerial preprocess')
Example #34
 def setup(self):
     if not self.log:
         self.log = Log.getDefaultLog()
     self.mylog = Log(self.log, "fakeProcess")
     self.jobIdentityItem = \
                 self.policy.getString("inputKeys.jobIdentity")
     self.sleeptime = self.policy.getInt("sleep")
     self.visitCount = 0
     self.failOnVisitN = self.policy.getInt("failIteration")
Example #35
 def __init__(self, stageId=-1, policy=None):
     """
     Standard Stage initializer.
     """
     lsst.pex.harness.Stage.Stage.__init__(self, stageId, policy)
     self.mopsLog = Log(Log.getDefaultLog(), 'mops.stage')
     if isinstance(self.mopsLog, log.ScreenLog):
         self.mopsLog.setScreenVerbose(True)
     return
Example #36
 def __init__(self, fullPath=None):
     if fullPath is None:
          # use .get() so a missing CAT_DIR yields None rather than a KeyError
          pDir = os.environ.get("CAT_DIR")
         if pDir is None:
             raise RuntimeError('CAT_DIR env var required')
         fullPath = os.path.join(pDir, 'policy/defaultProdCatPolicy.paf')
     self.policyObj = pexPolicy.Policy.createPolicy(fullPath)
     log = Log(Log.getDefaultLog(), "cat")
     log.log(Log.DEBUG, 'Reading policy from %s' % fullPath)
Example #37
class SourceMeasurementStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps the measurement of sources on an exposure.
        The exposures to measure on should be in the clipboard along with the
        FootprintSet(s) to measure on those exposures. The keys for the
        exposures, and the FootprintSet(s) can be specified in the 
        policy file. If not specified, default keys will be used
    """
    def setup(self):
        self.log = Log(self.log, "SourceMeasurementStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("meas_pipeline", 
            "SourceMeasurementStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = defPolicy
        else:
            self.policy.mergeDefaults(defPolicy.getDictionary())
        
    def process(self, clipboard):
        """
        Measure sources in the worker process
        """
        self.log.log(Log.INFO, "Measuring Sources in process")
        
        #this may raise exceptions
        try:
            measurePolicy, exposure, psf, positiveDetection, negativeDetection = \
                           self.getClipboardData(clipboard)
        except pexExcept.LsstException, e:
            self.log.log(Log.FATAL, str(e))
            # re-raise: without the clipboard data the rest of process() cannot run
            raise
        #
        # Need to do something smart about merging positive and negative
        # detection sets.             
        #
        # For now, assume they are disjoint sets, so merge is trivial
        #
        footprintLists = []
        if positiveDetection:
            self.log.log(Log.DEBUG, "Positive FootprintSet found")
            isNegative = False
            footprintLists.append([positiveDetection.getFootprints(), isNegative])

        if negativeDetection:
            self.log.log(Log.DEBUG, "Negative FootprintSet found")
            isNegative = True
            footprintLists.append([negativeDetection.getFootprints(), isNegative])

        sourceSet = srcMeas.sourceMeasurement(exposure, psf, footprintLists, measurePolicy)
        
        # place SourceSet on the clipboard
        sourceKey = self.policy.get("outputKeys.sources")
        clipboard.put(sourceKey, sourceSet)
        clipboard.put(sourceKey + "_persistable", afwDet.PersistableSourceVector(sourceSet))
Example #38
    def setup(self):
        self.log = Log(self.log, "simpleDiffImStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "SimpleDiffImStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())
Example #39
 def setup(self):
     if not self.log:
         self.log = Log.getDefaultLog()
     self.mylog = Log(self.log, "fakeProcess")
     self.jobIdentityItem = \
                 self.policy.getString("inputKeys.jobIdentity")
     self.sleeptime = self.policy.getInt("sleep")
     self.visitCount = 0
     self.failOnVisitN = self.policy.getInt("failIteration")
Example #40
 def setDebugVerbosity(self, verbLimit):
     """set the verbosity of the default log.  This and setLogThreshold()
     are different APIs that affect the same underlying limit that controls
     how many messages get logged.
     @param verbLimit    debug messages with a verbosity level larger
                             than this will not be printed.  If verbLimit is
                             positive, INFO, WARN, and FATAL messages will
                             also be printed.
     """
     Log.getDefaultLog().setThreshold(-1 * verbLimit)
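
setDebugVerbosity() and setLogThreshold() (the next example) adjust the same threshold on the default log, one phrased as a debug verbosity limit and one as an importance level. The two lines below restate that equivalence directly in terms of setThreshold, with the effects noted as comments (the level constants themselves come from lsst.pex.logging, not from this sketch):

from lsst.pex.logging import Log

# what setDebugVerbosity(3) does: debug messages with verbosity 1..3 are printed,
# and because the threshold is negative, INFO/WARN/FATAL are printed as well
Log.getDefaultLog().setThreshold(-3)

# what setLogThreshold(Log.WARN) does: only messages at WARN importance or above
Log.getDefaultLog().setThreshold(Log.WARN)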
Example #41
 def setLogThreshold(self, threshold):
     """set the importance threshold for the default log.  This and
     setDebugVerbosity are different APIs that affect the same underlying
     limit that controls how many messages get logged.  Normally one
     uses one of the predefined values--Log.DEBUG, Log.INFO, Log.WARN, and
     Log.FATAL--as input.
     @param threshold    the minimum importance of the message that will
                            get printed.
     """
     Log.getDefaultLog().setThreshold(threshold)
Example #42
 def setLogThreshold(self, threshold):
     """set the importance threshold for the default log.  This and
     setDebugVerbosity are different APIs that affect the same underlying
     limit that controls how many messages get logged.  Normally one
     uses one of the predefined values--Log.DEBUG, Log.INFO, Log.WARN, and
     Log.FATAL--as input.
     @param threshold    the minimum importance of the message that will
                            get printed.
     """
     Log.getDefaultLog().setThreshold(threshold)
Example #43
 def setDebugVerbosity(self, verbLimit):
     """set the verbosity of the default log.  This and setLogThreshold()
     are different APIs that affect the same underlying limit that controls
     how many messages get logged.
     @param verbLimit    debug messages with a verbosity level larger
                             than this will not be printed.  If verbLimit is
                             positive, INFO, WARN, and FATAL messages will
                             also be printed.
     """
     Log.getDefaultLog().setThreshold(-1*verbLimit)
Example #44
    def setup(self):
        self.log = Log(self.log, "CcdAssembly -- Parallel")

        policyFile = pexPolicy.DefaultPolicyFile(
            "ip_pipeline", "IsrCcdAssemblyStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath())

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy)
Example #45
    def setup(self):
        self.log = Log(self.log, "SourceMeasurementStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("meas_pipeline", 
            "SourceMeasurementStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = defPolicy
        else:
            self.policy.mergeDefaults(defPolicy.getDictionary())
Example #46
 def setLogThreshold(self, level):
     """
      set the default message importance threshold to be used for
      recording messages.  This value will be applied to the default
      root (system-wide) logger (or what it will be after logging is
      initialized) so that all software components are affected.
     """
     if self.log is not None:
         Log.getDefaultLog().setThreshold(level)
          self.log.log(Log.INFO,
                       "Updating Root Log Message Threshold to %i" % level)
     self.logthresh = level
Example #47
    def setup(self):
        self.log = Log(self.log, "BackgroundEstimationStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile(
            "meas_pipeline", "BackgroundEstimationStageDictionary.paf",
            "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())
Example #48
 def setLogThreshold(self, level):
     """
      set the default message importance threshold to be used for
      recording messages.  This value will be applied to the default
      root (system-wide) logger (or what it will be after logging is
      initialized) so that all software components are affected.
     """
     if self.log is not None:
         Log.getDefaultLog().setThreshold(level)
          self.log.log(Log.INFO,
                       "Updating Root Log Message Threshold to %i" % level)
     self.logthresh = level
Example #49
    def setup(self):
        self.log = Log(self.log, "ApertureCorrectionApplyStage - parallel")

        # aperture correction policy
        apCorrPolicyFile = pexPolicy.DefaultPolicyFile("meas_pipeline", 
                                                       "ApertureCorrectionApplyStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(apCorrPolicyFile,
                                                  apCorrPolicyFile.getRepositoryPath(), True)
        
        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())
Example #50
    def threadBarrier(self, iStage):
        """
        Create an approximate barrier at which all Slices synchronize with the Pipeline.
        """

        log = Log(self.log, "threadBarrier")

        self.checkExitBySyncPoint()

        # if((self._stop.isSet()) and (self.exitLevel == 2)):

        #     log.log(Log.INFO, "Pipeline stop is set at exitLevel of 2; exit here at a synchronization point")
        #     print "Pipeline stop is set at exitLevel of 2; exit here at a synchronization point"
        # os._exit()
        #    sys.exit()
        #    log.log(Log.INFO, "Pipeline Ever reach here ?? ")

        entryTime = time.time()
        log.log(Log.DEBUG, "Entry time %f" % (entryTime))

        for i in range(self.nSlices):
            k = 2 * i
            loopEventA = self.loopEventList[k]
            loopEventB = self.loopEventList[k + 1]

            signalTime1 = time.time()
            log.log(Log.DEBUG, "Signal to Slice  %d %f" % (i, signalTime1))

            loopEventA.set()

            log.log(Log.DEBUG, "Wait for signal from Slice %d" % (i))

            # Wait for the B event to be set by the Slice
            # Execute time.sleep between checks to free the GIL periodically
            useDelay = self.barrierDelay

            if (iStage == 1):
                useDelay = 0.1
            if (iStage == 290):
                useDelay = 0.1

            while (not (loopEventB.isSet())):
                time.sleep(useDelay)

            signalTime2 = time.time()
            log.log(
                Log.DEBUG,
                "Done waiting for signal from Slice %d %f" % (i, signalTime2))

            if (loopEventB.isSet()):
                loopEventB.clear()

        self.checkExitBySyncPoint()
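
threadBarrier above pairs two events per Slice: the Pipeline sets event A to release the Slice, then polls event B (sleeping between checks) until the Slice reports back, and finally clears B for the next iteration. The same handshake for a single worker, reduced to plain threading:

import threading
import time

eventA = threading.Event()   # Pipeline -> Slice: "proceed"
eventB = threading.Event()   # Slice -> Pipeline: "reached the barrier"

def slice_worker():
    eventA.wait()            # wait for the Pipeline's signal
    # ... one stage's worth of work would happen here ...
    eventB.set()             # report back to the Pipeline

worker = threading.Thread(target=slice_worker)
worker.start()

eventA.set()                 # release the Slice
while not eventB.is_set():   # poll with a short sleep, as threadBarrier does
    time.sleep(0.1)
eventB.clear()               # reset for the next barrier
worker.join()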
Example #51
class CrRejectStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps detecting and possibly removing cosmic rays from an
        exposure on the clipboard.

    Policy Dictionary:
    lsst/ip/pipeline/policy/CrRejectStageDictionary.paf

    Clipboard Input:
    - Calibrated science Exposure(s) (without background)
    - a PSF may be specified by policy attribute inputPsfKey. Alternatively, the
      stage's policy may request that a psf be constructed, by providing the
      psfPolicy attribute.

    ClipboardOutput:
    - Exposure with CRs removed. Key specified
        by policy attribute 'crSubtractedExposureKey'
    - nCR The number of CRs detected
    - PSF: the psf used to smooth the exposure before detection 
        Key specified by policy attribute 'psfKey'
    """
    def setup(self):
        self.log = Log(self.log, "CrRejectStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline", "CrRejectStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())

        self.crRejectPolicy = self.policy.get("crRejectPolicy")

    def process(self, clipboard):
        """
        Detect CRs in the worker process
        """
        self.log.log(Log.INFO, "Detecting CRs in process")
        
        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.get("inputKeys.exposure"))

        defaultFwhm = self.policy.get('parameters.defaultFwhm') # in arcsec
        keepCRs = self.policy.get('parameters.keepCRs')

        crs = ipUtils.cosmicRays.findCosmicRays(exposure, self.crRejectPolicy, defaultFwhm, keepCRs)
        nCR = len(crs)

        #output products
        clipboard.put("nCR", nCR)
        clipboard.put(self.policy.get("outputKeys.exposure"), exposure)
Example #52
def main():
    """execute the showEvents script"""

    try:
        (cl.opts, cl.args) = cl.parse_args()
        Log.getDefaultLog().setThreshold(
            run.verbosity2threshold(cl.opts.verbosity, 0))

        showEvents(cl.args[0], cl.args[1:], cl.opts.sleep)

    except run.UsageError, e:
        print >> sys.stderr, "%s: %s" % (cl.get_prog_name(), e)
        sys.exit(1)
Example #53
class DiffImStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps image subtraction        

    Policy Dictionary:
    lsst/ip/pipeline/policy/DiffImStageDictionary.paf

    Clipboard Input:
    - Template Exposure : to be convolved
    - Science Exposure  : to be matched to

    Clipboard Output:
    - Difference Exposure : resulting difference image
    - Psf Matching Kernel : the spatial model of the Psf matching Kernel
    - Background Function : differential background model
    """
    def setup(self):
        self.log   = Log(self.log, "DiffImStage - parallel")
        policyFile = pexPolicy.DefaultPolicyFile("ip_pipeline",
                                                 "DiffImStageDictionary.paf", "policy")
        defPolicy  = pexPolicy.Policy.createPolicy(policyFile,
                                                   policyFile.getRepositoryPath(), # repos
                                                   True)                           # validate

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())
        self.diffImPolicy = ipDiffim.makeDefaultPolicy()

    def process(self, clipboard):
        """
        Run image subtraction
        """
        self.log.log(Log.INFO, "Running image subtraction")
        
        # grab exposures from clipboard
        templateExposure = clipboard.get(self.policy.getString("inputKeys.templateExposureKey"))
        scienceExposure  = clipboard.get(self.policy.getString("inputKeys.scienceExposureKey"))

        # run image subtraction
        psfMatch = ipDiffim.ImagePsfMatch(self.diffImPolicy)
        results = psfMatch.subtractExposures(templateExposure, scienceExposure)
        
        # parse results
        differenceExposure, spatialKernel, backgroundModel, kernelCellSet = results

        #output products
        clipboard.put(self.policy.get("outputKeys.differenceExposureKey"), differenceExposure)
        clipboard.put(self.policy.get("outputKeys.psfMatchingKernelKey"), spatialKernel)
        clipboard.put(self.policy.get("outputKeys.backgroundFunctionKey"), backgroundModel)
Example #54
def testEventLogger(broker, runid, sliceid, props, input=None, logname=None,
                    logtopic="LSSTLogging"):
    """
    test logging through a broker.  This will send a single message to
    the logger, and, if input is a list of strings or a file object,
    it will also send all input messages in order.  If broker is None, the
    log messages will not go to any event broker, only to the screen.
    @param broker  the host running the event broker
    @param runid   the runid to assume for the log (ignored if broker is None)
    @param sliceid the sliceid to assume for the log (ignored if broker is None)
    @param props   a set of properties to attach to all messages
    @param input   if not None, messages to send, either in the form of
                      of a list of strings or a file object to read from
    @param logname the log name to send messages to 
    @param logtopic   the event topic to use (def: "LSSTLogging")
    """
    if broker:
        thresh = Log.getDefaultLog().getThreshold()
        setEventLogger(broker, runid, sliceid, thresh <= Log.DEBUG, logtopic)
        Log.getDefaultLog().setThreshold(thresh)
        logger.log(VERB, "Created event logger")
    else:
        logger.log(VERB, "Messages only going to screen")
        logger.setShowAll(Log.getDefaultLog().getThreshold() <= Log.DEBUG)

    if logname is None:
        logname = "showEvents"
    uselog = Log(Log.getDefaultLog(), logname)

    for key in props.keys():
        uselog.addPreamblePropertyString(key, props[key])

    testLogger(uselog, input)
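
A hedged usage example for the function above: with broker set to None the messages only go to the screen, so the script's logging path can be exercised without an event broker running (the run ID, slice, and messages here are placeholder values):

# screen-only smoke test; assumes the surrounding script's module-level
# logger has already been configured as in main()
testEventLogger(None, "test_run_0001", 0,
                {"pipeline": "demo"},
                input=["first test message", "second test message"],
                logname="demoLog")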
Example #55
    def populateClipboard(self, inputParamPropertySetPtr, iStage, eventTopic):
        """
        Place the event payload onto the Clipboard
        """
        log = Log(self.log, "populateClipboard")
        log.log(Log.DEBUG, 'Python Pipeline populateClipboard')

        queue = self.queueList[iStage-1]
        clipboard = queue.element()

        # Slice does not disassemble the payload of the event. 
        # It knows nothing of the contents. 
        # It simply places the payload on the clipboard with key of the eventTopic
        clipboard.put(eventTopic, inputParamPropertySetPtr)
Example #56
class BackgroundEstimationStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps estimating and possibly subtracting the background from an exposure
        on the clipboard.        

    Policy Dictionary:
    lsst/meas/pipeline/policy/BackgroundEstimationStageDictionary.paf

    Clipboard Input:
    - Calibrated science Exposure(s) (including background)

    ClipboardOutput:
    - background subtracted Exposure used in the detection. Key specified
        by policy attribute 'backgroundSubtractedExposure'
    - the measured background object itself. Key specified by policy 
        attribute 'background'        
    """
    def setup(self):
        self.log = Log(self.log, "BackgroundEstimationStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("meas_pipeline", 
                                                 "BackgroundEstimationStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = pexPolicy.Policy()
        self.policy.mergeDefaults(defPolicy.getDictionary())

    def process(self, clipboard):
        """
        Detect sources in the worker process
        """
        self.log.log(Log.INFO, "Subtracting background in process")
        
        #grab exposure from clipboard
        exposure = clipboard.get(self.policy.get("inputKeys.exposure"))
            
        #estimate and maybe subtract the background
        background, backgroundSubtractedExposure = sourceDetection.estimateBackground(
            exposure,
            self.policy.get("parameters.backgroundPolicy"),
            self.policy.get("parameters.subtractBackground"))

        #output products
        clipboard.put(self.policy.get("outputKeys.background"), background)
        if backgroundSubtractedExposure:
            clipboard.put(self.policy.get("outputKeys.backgroundSubtractedExposure"),
                          backgroundSubtractedExposure)
Example #57
 def __init__(self, policy, log=None, stageId=-1, eventBroker=None,
              sysdata=None):
     """
     initialize this stage with the policy that defines the stage and
     some contextual system data.  Applications normally do not directly
     call this constructor.  Instead they either construct a Stage subclass
     or create a Stage instance using makeStage() or makeStageFromPolicy().
     
     @param policy       the policy that will configure the SerialProcessing
                            and ParallelProcessing
     @param log          the log object the stage instance should use.
                           If not provided, a default will be used.
     @param eventBroker  the name of the host where the event broker is
                           running.  If not provided, an eventBroker will
                           not be available to the stage.
     @param sysdata      a dictionary of data describing the execution
                           context.  The stage uses this information to
                           set some of its internal data.  See
                            StageProcessing documentation for details.
                            The name provided in the policy will override
                            the name in this dictionary.
     """
     if sysdata is None:
         sysdata = {}
     self.sysdata = sysdata
     self.stagePolicy = policy
     self.eventBroker = eventBroker
     if log is None:
         log = Log(Log.getDefaultLog(), "stage")
     self.log = log
Example #58
    def __init__(self, persistDir, logger=None, runId=None,
                 fromSubclass=False):
        """
        create the JobOffice
        @param persistDir   the directory where the blackboard should be
                              persisted
        @param logger       the logger to use.  If not given, the default
                              will be used.
        @param runId        the run ID to restrict our attention to.  If
                              not None, incoming events will be restricted
                              to the given runId.
        """
        self._checkAbstract(fromSubclass, "JobOffice")
        threading.Thread.__init__(self, name="JobOffice")
        self.setDaemon(False)

        self.bb = Blackboard(persistDir)
        self.esys = EventSystem.getDefaultEventSystem()
        self.runId = runId
        self.brokerHost = None
        self.brokerPort = None
        self.originatorId = self.esys.createOriginatorId()

        if logger is None:
            logger = Log.getDefaultLog()
        self.log = logger   # override this in sub-class

        self.halt = False
        self.finalDatasetSent = False
        self.jobOfficeCompletedSent = False
        self.stopTopic = "JobOfficeStop"
        self.stopThread = None
        self.exc = None