Esempio n. 1
0
    def append(self, val):
        """!Append a background and its interpolation/approximation parameters.

        @param[in] val  either a 7-tuple of (background, interpStyle, undersampleStyle,
                        approxStyle, approxOrderX, approxOrderY, approxWeighting),
                        or a bare Background object, in which case the remaining
                        parameters all default to None
        """
        try:
            bkgd, interpStyle, undersampleStyle, approxStyle, approxOrderX, approxOrderY, approxWeighting = val
        except TypeError:
            # val is not unpackable, so assume it is a bare Background.
            # NOTE(review): an iterable of the wrong length raises ValueError,
            # not TypeError, and would propagate out of this method -- confirm
            # that is the intended behaviour.
            bkgd = val
            interpStyle = None
            undersampleStyle = None
            approxStyle = None
            approxOrderX = None
            approxOrderY = None
            approxWeighting = None

        # Check to see if the Background is actually a BackgroundMI.
        # Such special treatment is not generally a good idea as it is against the whole idea of subclassing.
        # However, lsst.afw.math.makeBackground() returns a Background, even though it's really a BackgroundMI
        # under the covers.  Persistence requires that the type python sees is the actual type under the covers
        # (or it will call the wrong python class's python persistence methods).
        # The real solution is to not use makeBackground() in python but call the constructor directly;
        # however there is already code using makeBackground(), so this is an attempt to assist the user.
        subclassed = afwMath.cast_BackgroundMI(bkgd)
        if subclassed is not None:
            bkgd = subclassed
        else:
            # cast failed: warn that persistence of this object may not work
            from lsst.pex.logging import getDefaultLog
            getDefaultLog().warn("Unrecognised Background object %s may be unpersistable." % (bkgd,))

        bgInfo = (bkgd, interpStyle, undersampleStyle, approxStyle,
                  approxOrderX, approxOrderY, approxWeighting)
        self._backgrounds.append(bgInfo)
Esempio n. 2
0
    def append(self, val):
        """Add a background, plus its associated parameters, to the list.

        ``val`` may be the full 7-tuple of (background, interpStyle,
        undersampleStyle, approxStyle, approxOrderX, approxOrderY,
        approxWeighting), or a bare Background object, in which case the
        remaining parameters default to None.
        """
        try:
            (bkgd, interpStyle, undersampleStyle, approxStyle,
             approxOrderX, approxOrderY, approxWeighting) = val
        except TypeError:
            # Not unpackable: treat val as a bare Background object
            bkgd = val
            interpStyle = undersampleStyle = approxStyle = None
            approxOrderX = approxOrderY = approxWeighting = None

        # Downcast a Background that is really a BackgroundMI under the covers.
        # Special-casing a subclass like this is normally poor practice, but
        # lsst.afw.math.makeBackground() returns the base Background type, and
        # persistence needs python to see the concrete type (otherwise the
        # wrong class's python persistence methods get called).  The real fix
        # is to construct BackgroundMI directly instead of via makeBackground(),
        # but existing code uses makeBackground(), so we assist the user here.
        subclassed = afwMath.cast_BackgroundMI(bkgd)
        if subclassed is not None:
            bkgd = subclassed
        else:
            from lsst.pex.logging import getDefaultLog
            getDefaultLog().warn(
                "Unrecognised Background object %s may be unpersistable." %
                (bkgd, ))

        self._backgrounds.append((bkgd, interpStyle, undersampleStyle, approxStyle,
                                  approxOrderX, approxOrderY, approxWeighting))
Esempio n. 3
0
def getEnvironmentPackages():
    """Provide a dict of products and their versions from the environment

    We use EUPS to determine the version of certain products (those that don't provide
    a means to determine the version any other way) and to check if uninstalled packages
    are being used. We only report the product/version for these packages.

    @return dict mapping product name to version string; empty if EUPS is unavailable
    """
    try:
        from eups import Eups
        from eups.Product import Product
    except ImportError:
        # Narrowed from a bare "except:", which would also have swallowed
        # KeyboardInterrupt/SystemExit.
        from lsst.pex.logging import getDefaultLog
        getDefaultLog().warn("Unable to import eups, so cannot determine package versions from environment")
        return {}

    # Cache eups object since creating it can take a while
    global _eups
    if not _eups:
        _eups = Eups()
    products = _eups.findProducts(tags=["setup"])

    # Get versions for things we can't determine via runtime mechanisms
    # XXX Should we just grab everything we can, rather than just a predetermined set?
    packages = {prod.name: prod.version for prod in products if prod in ENVIRONMENT}

    # The string 'LOCAL:' (the value of Product.LocalVersionPrefix) in the version name indicates uninstalled
    # code, so the version could be different than what's being reported by the runtime environment (because
    # we don't tend to run "scons" every time we update some python file, and even if we did sconsUtils
    # probably doesn't check to see if the repo is clean).
    for prod in products:
        if not prod.version.startswith(Product.LocalVersionPrefix):
            continue
        ver = prod.version

        gitDir = os.path.join(prod.dir, ".git")
        if os.path.exists(gitDir):
            # get the git revision and an indication if the working copy is clean
            revCmd = ["git", "--git-dir=" + gitDir, "--work-tree=" + prod.dir, "rev-parse", "HEAD"]
            diffCmd = ["git", "--no-pager", "--git-dir=" + gitDir, "--work-tree=" + prod.dir, "diff",
                       "--patch"]
            try:
                rev = subprocess.check_output(revCmd).decode().strip()
                diff = subprocess.check_output(diffCmd)
            except (subprocess.CalledProcessError, OSError):
                # Narrowed from a bare "except:"; a missing or failing git
                # should not abort version collection, just mark the product.
                ver += "@GIT_ERROR"
            else:
                ver += "@" + rev
                if diff:
                    # Append a digest of the diff so distinct uncommitted
                    # states yield distinct version strings.
                    ver += "+" + hashlib.md5(diff).hexdigest()
        else:
            ver += "@NO_GIT"

        packages[prod.name] = ver
    return packages
Esempio n. 4
0
class MO(object):
    """Measure the sources on a frame"""

    def __init__(self, display=False, rhs=None):
        """!Constructor

        @param display  whether to display results
        @param rhs      optional object to copy measurement state from

        @throw RuntimeError if rhs lacks one of the required attributes
        """
        self.display = display
        self.gas = None

        if rhs:
            try:
                self.exposure = rhs.exposure
                self.gas = rhs.gas
                self.pixscale = rhs.pixscale
                self.psf = rhs.psf
                self.sourceList = rhs.sourceList
                self.XY0 = rhs.XY0

                try:
                    # psfImage is optional on rhs
                    self.psfImage = rhs.psfImage
                except AttributeError:
                    pass
            except AttributeError as e:
                # use "as"/call syntax (valid from python 2.6 onwards) instead
                # of the old comma-form except/raise statements
                raise RuntimeError("Unexpected rhs: %s (%s)" % (rhs, e))

        # the duplicate "self.display = display" that used to follow here was
        # redundant (nothing modified it in between) and has been removed

        # set up a log
        self.scriptLog = pexLog.getDefaultLog()
        self.scriptLog.setThreshold(pexLog.Log.WARN)
        self.log = pexLog.Log(self.scriptLog, "measureSources")
Esempio n. 5
0
 def __init__(self, config):
     """Construct the PSF determiner.

     @param[in] config: instance of PolypixPsfDeterminerConfig
     """
     self.config = config
     # Both logs are named meas.algorithms.psfDeterminer so that psf debugging
     # can be switched on independently of which determiner is active.
     self.warnLog = pexLog.Log(pexLog.getDefaultLog(), "meas.algorithms.psfDeterminer")
     self.debugLog = pexLog.Debug("meas.algorithms.psfDeterminer")
Esempio n. 6
0
    def testLogging(self):
        """Exercise the logging API: default log, a ScreenLog root, child
        logs, threshold filtering and inheritance, streamed records, and
        flush-on-delete behaviour.
        """
        # test a simple message to the default log
        dlog = log.getDefaultLog()
        dlog.log(log.Log.WARN, "this is a warning")

        # now let's create our own root log
        logger = log.ScreenLog(True)

        # test creation of child log
        tlog = log.Log(logger, "test")
        tlog.log(log.Log.INFO, "I like your hat")

        # test that "format", "infof", etc are ignored by swig.
        self.assertFalse(hasattr(tlog, 'format'))
        self.assertFalse(hasattr(tlog, 'debugf'))
        self.assertFalse(hasattr(tlog, 'infof'))
        self.assertFalse(hasattr(tlog, 'warnf'))
        self.assertFalse(hasattr(tlog, 'fatalf'))

        # test shortcut methods
        tlog.logdebug("Shortcut debug")
        tlog.info("Shortcut info")
        tlog.warn("Shortcut warn")
        tlog.fatal("Shortcut fatal")

        # test threshold filtering
        tlog.setThreshold(log.Log.WARN)
        tlog.log(log.Log.INFO, "I like your gloves")  # shouldn't see this: INFO < WARN threshold

        # test the persistance of threshold levels: re-creating the same-named
        # log keeps the WARN threshold set above
        tlog = log.Log(logger, "test")
        tlog.log(log.Log.INFO, "I like your shoes")  # shouldn't see this either
        tlog.setThreshold(log.Log.DEBUG)
        tlog.log(log.Log.INFO, "I said, I like your shoes")

        # test descendent log and ancestor's control of threshold
        tgclog = log.Log(tlog,
                         "grand.child")  # name is now "test.grand.child"
        tgclog.log(log.Log.INFO, "Let's play")
        tlog.setThreshold(log.Log.FATAL)
        tgclog.log(log.Log.INFO, "You go first")

        # test streaming; records must be terminated with endr
        log.LogRec(tgclog, log.Log.FATAL) << "help: I've fallen" << log.Prop(
            "NODE", 5) << "& I can't get up" << log.endr
        tmp = log.Prop("NODE", 5)
        log.LogRec(
            tgclog, log.Log.FATAL
        ) << "help: I've fallen" << tmp << "& I can't get up" << log.endr

        # test flushing on delete: the first record has no endr and should be
        # flushed when the Rec is garbage-collected
        log.Rec(tgclog, log.Log.FATAL) << "never mind"
        log.Rec(tgclog, log.Log.DEBUG) << "testing" << log.endr
Esempio n. 7
0
def importMatplotlib():
    """!Lazily import matplotlib.pyplot, warning when it is unavailable.
    @return the imported module if available, else False.

    """
    try:
        import matplotlib.pyplot as plt
    except ImportError as err:
        # Plotting is optional: log a warning instead of failing outright.
        log = pexLog.Log(pexLog.getDefaultLog(), 'lsst.ip.diffim.utils',
                         pexLog.INFO)
        log.warn('Unable to import matplotlib: %s' % err)
        return False
    return plt
Esempio n. 8
0
def importMatplotlib():
    """!Import matplotlib.pyplot when needed, warn if not available.
    @return the imported module if available, else False.

    """
    try:
        import matplotlib.pyplot as plt
        return plt
    except ImportError as err:
        # Plotting is optional: warn through the lsst.ip.diffim.utils logger
        # rather than letting the ImportError propagate.
        log = pexLog.Log(pexLog.getDefaultLog(),
                         'lsst.ip.diffim.utils', pexLog.INFO)
        log.warn('Unable to import matplotlib: %s' % err)
        return False
 def testOverrides(self):
     """Test config and log override

     Verifies that a config and a log passed to parseAndRun are propagated
     unchanged to the parsed command.
     """
     config = TestTask.ConfigClass()
     config.floatField = -99.9
     defLog = pexLog.getDefaultLog()
     log = pexLog.Log(defLog, "cmdLineTask")
     retVal = TestTask.parseAndRun(
         args=[DataPath, "--output", self.outPath, "--id", "visit=2"],
         config=config,
         log=log,
     )
     # assertEquals is a deprecated alias; use assertEqual, and assertIs for
     # identity checks (gives a better failure message than assertTrue(x is y))
     self.assertEqual(retVal.parsedCmd.config.floatField, -99.9)
     self.assertIs(retVal.parsedCmd.log, log)
Esempio n. 10
0
    def testLogging(self):
        """Exercise the logging API: default log, a ScreenLog root, child
        logs, threshold filtering and inheritance, streamed records, and
        flush-on-delete behaviour.
        """
        # test a simple message to the default log
        dlog = log.getDefaultLog()
        dlog.log(log.Log.WARN, "this is a warning")

        # now let's create our own root log
        logger = log.ScreenLog(True)

        # test creation of child log
        tlog = log.Log(logger, "test")
        tlog.log(log.Log.INFO, "I like your hat")

        # test that "format", "infof", etc are ignored by swig.
        self.assertFalse(hasattr(tlog, 'format'))
        self.assertFalse(hasattr(tlog, 'debugf'))
        self.assertFalse(hasattr(tlog, 'infof'))
        self.assertFalse(hasattr(tlog, 'warnf'))
        self.assertFalse(hasattr(tlog, 'fatalf'))

        # test shortcut methods
        tlog.logdebug("Shortcut debug")
        tlog.info("Shortcut info")
        tlog.warn("Shortcut warn")
        tlog.fatal("Shortcut fatal")

        # test threshold filtering
        tlog.setThreshold(log.Log.WARN)
        tlog.log(log.Log.INFO, "I like your gloves")  # shouldn't see this: INFO < WARN threshold

        # test the persistance of threshold levels: re-creating the same-named
        # log keeps the WARN threshold set above
        tlog = log.Log(logger, "test")
        tlog.log(log.Log.INFO, "I like your shoes")  # shouldn't see this either
        tlog.setThreshold(log.Log.DEBUG)
        tlog.log(log.Log.INFO, "I said, I like your shoes")

        # test descendent log and ancestor's control of threshold
        tgclog = log.Log(tlog, "grand.child")  # name is now "test.grand.child"
        tgclog.log(log.Log.INFO, "Let's play")
        tlog.setThreshold(log.Log.FATAL)
        tgclog.log(log.Log.INFO, "You go first")

        # test streaming (stray C++-style trailing semicolons removed)
        log.LogRec(tgclog, log.Log.FATAL) << "help: I've fallen" << log.Prop("NODE", 5) << "& I can't get up" << log.endr
        tmp = log.Prop("NODE", 5)
        log.LogRec(tgclog, log.Log.FATAL) << "help: I've fallen" << tmp << "& I can't get up" << log.endr

        # test flushing on delete
        log.Rec(tgclog, log.Log.FATAL) << "never mind"
        log.Rec(tgclog, log.Log.DEBUG) << "testing" << log.endr
Esempio n. 11
0
    def __init__(self, config=None, name=None, parentTask=None, log=None):
        """!Create a Task

        @param[in] config       configuration for this task (an instance of self.ConfigClass,
            which is a task-specific subclass of lsst.pex.config.Config), or None. If None:
            - If parentTask specified then defaults to parentTask.config.\<name>
            - If parentTask is None then defaults to self.ConfigClass()
        @param[in] name         brief name of task, or None; if None then defaults to self._DefaultName
        @param[in] parentTask   the parent task of this subtask, if any.
            - If None (a top-level task) then you must specify config and name is ignored.
            - If not None (a subtask) then you must specify name
        @param[in] log          pexLog log; if None then the default is used;
            in either case a copy is made using the full task name.

        @throw RuntimeError if parentTask is None and config is None.
        @throw RuntimeError if parentTask is not None and name is None.
        @throw RuntimeError if name is None and _DefaultName does not exist.
        """
        self.metadata = dafBase.PropertyList()
        self._parentTask = parentTask

        # use identity ("is") comparisons with None throughout: "== None" can
        # invoke arbitrary __eq__ methods (e.g. on Config objects)
        if parentTask is not None:
            if name is None:
                raise RuntimeError("name is required for a subtask")
            self._name = name
            self._fullName = parentTask._computeFullName(name)
            if config is None:
                config = getattr(parentTask.config, name)
            self._taskDict = parentTask._taskDict
            self.log = pexLog.Log(parentTask.log, name)
        else:
            if name is None:
                name = getattr(self, "_DefaultName", None)
                if name is None:
                    raise RuntimeError("name is required for a task unless it has attribute _DefaultName")
                # (removed redundant "name = self._DefaultName": getattr above
                # already fetched that value)
            self._name = name
            self._fullName = self._name
            if config is None:
                config = self.ConfigClass()
            self._taskDict = dict()
            if log is None:
                log = pexLog.getDefaultLog()
            self.log = pexLog.Log(log, self._fullName)

        self.config = config
        self._display = lsstDebug.Info(self.__module__).display
        self._taskDict[self._fullName] = self
Esempio n. 12
0
def _poolFunctionWrapper(function, arg):
    """Wrapper around function to catch exceptions that don't inherit from Exception

    Such exceptions aren't caught by multiprocessing, which causes the slave
    process to crash and you end up hitting the timeout.
    """
    try:
        return function(arg)
    except Exception:
        # Ordinary exceptions are handled fine by multiprocessing; let them
        # propagate unchanged.
        raise # No worries
    except:
        # Anything else (non-Exception-derived) must be repackaged as a plain
        # Exception so that multiprocessing can recognise and transport it.
        excClass, excValue, _ = sys.exc_info()
        getDefaultLog().warn("Unhandled exception %s (%s):\n%s" % (excClass.__name__, excValue, traceback.format_exc()))
        raise Exception("Unhandled exception: %s (%s)" % (excClass.__name__, excValue))
Esempio n. 13
0
    def __call__(self, args):
        """!Run the Task on a single target.

        This default implementation assumes that the 'args' is a tuple
        containing a data reference and a dict of keyword arguments.

        @warning if you override this method and wish to return something when
        doReturnResults is false, then it must be picklable to support
        multiprocessing and it should be small enough that pickling and
        unpickling do not add excessive overhead.

        @param args     Arguments for Task.run()

        @return:
        - None if doReturnResults false
        - A pipe_base Struct containing these fields if doReturnResults true:
            - dataRef: the provided data reference
            - metadata: task metadata after execution of run
            - result: result returned by task run, or None if the task fails
        """
        dataRef, kwargs = args
        if self.log is None:
            self.log = getDefaultLog()
        # label log messages with the dataId(s) being processed
        if hasattr(dataRef, "dataId"):
            self.log.addLabel(str(dataRef.dataId))
        elif isinstance(dataRef, (list, tuple)):
            self.log.addLabel(str([ref.dataId for ref in dataRef if hasattr(ref, "dataId")]))
        task = self.makeTask(args=args)
        result = None # in case the task fails
        if self.doRaise:
            result = task.run(dataRef, **kwargs)
        else:
            try:
                result = task.run(dataRef, **kwargs)
            except Exception as e:  # "as" syntax (valid 2.6+) replaces py2-only "except Exception, e"
                # don't use a try block as we need to preserve the original exception
                if hasattr(dataRef, "dataId"):
                    task.log.fatal("Failed on dataId=%s: %s" % (dataRef.dataId, e))
                elif isinstance(dataRef, (list, tuple)):
                    task.log.fatal("Failed on dataId=[%s]: %s" %
                                   (",".join([str(_.dataId) for _ in dataRef]), e))
                else:
                    task.log.fatal("Failed on dataRef=%s: %s" % (dataRef, e))

                if not isinstance(e, TaskError):
                    traceback.print_exc(file=sys.stderr)
        # NOTE(review): the docstring promises a Struct when doReturnResults is
        # true, but nothing is returned here (result is unused) -- confirm the
        # tail of this method was not lost.
    def __init__(self, filenameList, healpix, nside):
        """!Constructor

        @param filenameList  List of filenames; first is the multiindex, then
                             follows the individual index files
        @param healpix       Healpix number
        @param nside         Healpix nside
        """
        # A multiindex requires the multiindex file itself plus at least one
        # individual index file.
        if len(filenameList) < 2:
            raise RuntimeError("Insufficient filenames provided for multiindex (%s): expected >= 2" %
                               (filenameList,))
        self._filenameList = filenameList
        self._healpix = int(healpix)
        self._nside = int(nside)
        # Lazily-loaded state: nothing is read from disk until needed.
        self._loaded = False
        self._mi = None
        self.log = getDefaultLog()
Esempio n. 15
0
    def __init__(self,
                 script,
                 importList=None,
                 command=None,
                 resourceList=None,
                 queue=None):
        """Initialisation

        @param script Text of script to execute
        @param importList List of imports; may be specified as a 2-tuple for 'import X as Y'
        @param command Command to run to submit to queue ("qsub -V")
        @param resourceList List of resources for PBS
        @param queue Name of queue

        @throw RuntimeError if the script is empty or inconsistently indented
        """

        # Remove common indentation, taking the first non-blank line as the
        # exemplar of the indentation to strip.  Raw strings avoid
        # invalid-escape-sequence warnings for the regexes.
        lines = script.split("\n")
        exemplar = None  # First non-blank line
        for line in lines:
            if re.search(r"\S", line):
                exemplar = line
                break
        if exemplar is None:
            raise RuntimeError("Empty script provided.")
        match = re.match(r"(\s+)", exemplar)
        if match:
            indent = match.group(0)  # Indentation used
            newLines = []
            for line in lines:
                if not re.search(r"\S", line):
                    continue  # blank lines are dropped entirely
                # The old code tested "re.sub(...) is None", which can never
                # fire (re.sub always returns a string), so inconsistent
                # indentation went undetected; check for the prefix explicitly.
                if not line.startswith(indent):
                    raise RuntimeError("Inconsistent indentation in script: " +
                                       script)
                newLines.append(line[len(indent):])
            script = "\n".join(newLines)

        self.script = script
        self.importList = importList
        self.command = command if command is not None else "qsub -V"
        self.resourceList = resourceList
        self.queue = queue
        self.log = pexLog.Log(pexLog.getDefaultLog(), "PbsQueue")
        return
Esempio n. 16
0
    def __init__(
            self,  # ReadWrite
            mappers,  # Mapper or mapper class to use
            ccdKeys,  # Data keywords required to specify a CCD
            fileKeys=None,  # Data keywords required to specify a file
            config=None,  # Configuration
    ):
        """Initialisation

        @param mapper Data mapper (class or instance) for persistence
        @param config Configuration (for instantiating mapper)

        @throw RuntimeError if mappers is neither a Mapper (class or instance)
        nor a two-element [inMapper, outMapper] list
        """

        # if we got a list, it contains [inMapper, outMapper]
        if isinstance(mappers, list) and len(mappers) == 2:
            inMapper, outMapper = mappers
        # if we got a mapper, use it for both input and output
        elif (isinstance(mappers, dafPersist.Mapper)
              or issubclass(mappers, dafPersist.Mapper)):
            inMapper, outMapper = mappers, mappers
        # punt
        else:
            raise RuntimeError(
                "'mapper' must be a dafPersist.Mapper (or derived from), or a list containing two of them (in and out)."
            )

        self.log = pexLog.Log(pexLog.getDefaultLog(), "ReadWrite")

        # separate butlers for input and output; each mapper is instantiated
        # (if given as a class) by initMapper before building the butler
        self.inMapper = initMapper(inMapper, config, self.log, inMap=True)
        self.ibf = dafPersist.ButlerFactory(mapper=self.inMapper)
        self.inButler = self.ibf.create()

        self.outMapper = initMapper(outMapper, config, self.log, inMap=False)
        self.obf = dafPersist.ButlerFactory(mapper=self.outMapper)
        self.outButler = self.obf.create()

        self.ccdKeys = ccdKeys
        # fileKeys defaults to a copy of ccdKeys; a single string is promoted
        # to a one-element list (basestring: this module is python 2)
        if fileKeys is None:
            fileKeys = list(ccdKeys)
        if isinstance(fileKeys, basestring):
            fileKeys = [fileKeys]
        self.fileKeys = fileKeys
        return
    def __init__(self, rerun=0, basedir='.', **kwargs):
        """Constructor (python 2: note the print statements below).

        @param rerun    rerun number, used to name the output subdirectory
        @param basedir  root directory containing the per-visit inputs
        @param kwargs   ignored (reported for debugging)
        """
        Mapper.__init__(self)

        print 'TractorMapper(): ignoring kwargs', kwargs

        self.basedir = basedir
        self.rerun = rerun
        self.log = pexLog.Log(pexLog.getDefaultLog(), 'TractorMapper')

        # Directory templates: inputs live in t%(visit)04i, outputs in its
        # rr%(rerun)04i subdirectory; the %-patterns are filled in from dataIds.
        indir = os.path.join(self.basedir, 't%(visit)04i')
        outdir = os.path.join(indir, 'rr%(rerun)04i')
        # Map dataset type -> (filename template, python type, persistable type);
        # None types indicate pickled/unpersisted data.
        self.filenames = { 'outdir': (outdir, None, None),
                           'visitim': (os.path.join(indir, 't.fits'), #'t_img.fits'), #img.fits'),
                                       'lsst.afw.image.ExposureF', 'ExposureF'),
                           'psf': (os.path.join(outdir, 'psf.boost'),
                                   'lsst.afw.detection.Psf', 'Psf'),
                           'src': (os.path.join(outdir, 'src.boost'),
                                   # dare to dream / keep dreaming
                                   #os.path.join(outdir, 'src.fits'),
                                   # htf did this work before?
                                   #'lsst.afw.detection.Source', 'Source'),
                                   'lsst.afw.detection.PersistableSourceVector',
                                   'PersistableSourceVector'),
                           'bb': (os.path.join(outdir, 'bb.pickle'),
                                  None, None),
                           'pyfoots': (os.path.join(outdir, 'foots.pickle'),
                                       None, None),
                           'footprints': (os.path.join(outdir, 'foots.boost'),
                                          'lsst.afw.detection.FootprintList',
                                          'FootprintList'),
                           'truesrc': (os.path.join(indir, 'srcs.fits'),
                                       None, None),
                           }
        # NOTE(review): the triple-quoted string below is dead code (a no-op
        # expression statement), apparently the remains of an old datasetType
        # loop; kept as-is pending confirmation it can be deleted.
        '''
        for datasetType in ["raw", "bias", "dark", "flat", "fringe",
            "postISR", "postISRCCD", "sdqaAmp", "sdqaCcd",
            "icSrc", "icMatch", "visitim", "psf", "apCorr", "calexp", "src",
            "sourceHist", "badSourceHist", "source", "badSource",
            "invalidSource", "object", "badObject"]:
            '''
        self.keys = ['visit', 'filter']
Esempio n. 18
0
 def __init__(self, config=None, log=None):
     """Record the config and attach a child log named after this class.

     @param config  configuration object, or None
     @param log     parent log; the default log is used when None
     """
     self.config = config
     if log is None:
         log = pexLog.getDefaultLog()
     self.log = pexLog.Log(log, self.__class__.__name__)
     self._display = lsstDebug.Info(__name__).display
Esempio n. 19
0
    def testAlgorithms(self):
        """Test that we can instantiate and use algorithms"""

        config = measAlg.SourceMeasurementConfig()
        config.algorithms.names = measAlg.AlgorithmRegistry.all.keys()
        # the centroider is run separately by the measurement task
        config.algorithms.names.discard(config.centroider.name)
        config.doReplaceWithNoise = False

        # presumably flux.peakLikelihood is unsupported in this setup -- TODO confirm
        config.algorithms.names.discard("flux.peakLikelihood")

        if False:
            # flip to True to get verbose logging when debugging this test
            log = pexLog.getDefaultLog()
            log.setThreshold(log.DEBUG)

        schema = afwTable.SourceTable.makeMinimalSchema()
        task = measAlg.SourceMeasurementTask(schema, config=config)
        catalog = afwTable.SourceCatalog(schema)
        source = catalog.addNew()
        source.set("id", 12345)

        # Build a synthetic exposure containing a single Gaussian "star" of
        # width `width` at (xStar, yStar), with XY0 offset (x0, y0)
        size = 256
        xStar, yStar = 65.432, 76.543
        width = 3.21
        x0, y0 = 12345, 54321
        x, y = numpy.indices((size, size))
        im = afwImage.MaskedImageF(afwGeom.ExtentI(size, size))
        im.setXY0(afwGeom.Point2I(x0, y0))
        im.getVariance().set(1.0)
        arr = im.getImage().getArray()
        arr[y, x] = numpy.exp(-0.5 * ((x - xStar) ** 2 + (y - yStar) ** 2) / width ** 2)
        psf = testLib.makeTestPsf(im)
        exp = afwImage.makeExposure(im)
        exp.setPsf(psf)
        exp.setXY0(afwGeom.Point2I(x0, y0))
        # trivial TAN-like WCS so algorithms needing sky coordinates can run
        scale = 1.0e-5
        wcs = afwImage.makeWcs(
            afwCoord.Coord(0.0 * afwGeom.degrees, 0.0 * afwGeom.degrees),
            afwGeom.Point2D(0.0, 0.0),
            scale,
            0.0,
            0.0,
            scale,
        )
        exp.setWcs(wcs)

        # Footprint with a single peak at the star position (parent coordinates)
        point = afwGeom.Point2I(int(xStar + x0), int(yStar + y0))
        bbox = im.getBBox()
        bbox.shift(afwGeom.Extent2I(x0, y0))
        foot = afwDetection.Footprint(point, width, bbox)
        foot.addPeak(point.getX(), point.getY(), 1.0)
        afwDetection.setMaskFromFootprint(exp.getMaskedImage().getMask(), foot, 1)
        source.setFootprint(foot)

        if display:
            ds9.mtv(exp, frame=1)

        task.run(exp, catalog)

        # no algorithm should have raised its failure flag on this clean star
        for alg in config.algorithms:
            flagName = alg + ".flags"
            if False:
                # flip to True to dump per-algorithm results when debugging
                print(
                    alg,
                    source.get(flagName) if flagName in schema else None,
                    source.get(alg) if alg in schema else None,
                )
            elif flagName in schema:
                self.assertFalse(source.get(alg + ".flags"))
Esempio n. 20
0
"""test simple use of the Log facility."""
"""
testLog

Run with:
   python testLog
"""

import lsst.utils.tests as tests
import lsst.pex.logging as log

if __name__ == "__main__":

    # a simple message through the default log
    dlog = log.getDefaultLog()
    dlog.log(log.Log.WARN, "this is a warning")

    # create our own root log, writing to the screen
    logger = log.ScreenLog(True)

    # a child of that root; messages inherit the root's destination
    tlog = log.Log(logger, "test")
    tlog.log(log.Log.INFO, "I like your hat")

    # verify that "format", "infof", etc. are ignored by swig
    for missing in ('format', 'debugf', 'infof', 'warnf', 'fatalf'):
        assert not hasattr(tlog, missing)
def run(visit, rerun, config):
    """Process one visit: read the exposure, calibrate, run photometry, and
    persist the PSF, sources and footprint bounding-boxes (python 2 script).

    @param visit   visit number used to build the dataId
    @param rerun   rerun number used to build the dataId
    @param config  configuration passed to ReadWrite/Calibrate/Photometry
    @return list of footprint bounding boxes as (minX, minY, maxX, maxY) tuples
    """
    mapper = getMapper()
    dataId = { 'visit': visit, 'rerun': rerun }
    rrdir = mapper.getPath('outdir', dataId)
    if not os.path.exists(rrdir):
        print 'Creating directory for ouputs:', rrdir
        os.makedirs(rrdir)
    else:
        print 'Output directory:', rrdir
    io = pipReadWrite.ReadWrite(mapper, ['visit'], config=config)
    #ccdProc = pipProcCcd.ProcessCcd(config=config, Isr=NullISR, Calibrate=MyCalibrate)
    #raws = io.readRaw(dataId)
    #detrends = io.detrends(dataId, config)
    print 'Reading exposure'
    #exposure = io.read('visitim', dataId)
    detrends = []
    exposure = io.inButler.get('visitim', dataId)
    print 'exposure is', exposure
    print 'size', exposure.getWidth(), 'x', exposure.getHeight()
    # debug
    mi = exposure.getMaskedImage()
    #img = mi.getImage()
    #var = mi.getVariance()
    #print 'var at 90,100 is', var.get(90,100)
    #print 'img at 90,100 is', img.get(90,100)
    #print 'wcs is', exposure.getWcs()
    # sanity check: the exposure must carry a WCS for downstream calibration
    wcs = exposure.getWcs()
    assert wcs
    #print 'ccdProc.run()...'
    # raws = [exposure]
    #exposure, psf, apcorr, brightSources, sources, matches, matchMeta = ccdProc.run(raws, detrends)
    print 'Calibrate()...'
    log = pexLog.getDefaultLog()
    cal = MyCalibrate(config=config, log=log, Photometry=MyPhotometry)
    psf,sources,footprints = cal.run2(exposure)

    print 'Photometry()...'
    phot = pipPhot.Photometry(config=config, log=log)
    sources, footprints = phot.run(exposure, psf)
    print 'sources:', len(sources)
    for s in sources:
        print '  ', s, s.getXAstrom(), s.getYAstrom(), s.getPsfFlux(), s.getIxx(), s.getIyy(), s.getIxy()

    print 'footprints:', footprints
    # oh yeah, baby!
    fps = footprints.getFootprints()
    print len(fps)
    # collect plain-tuple bounding boxes so they can be pickled by the butler
    bb = []
    for f in fps:
        print '  Footprint', f
        print '  ', f.getBBox()
        bbox = f.getBBox()
        bb.append((bbox.getMinX(), bbox.getMinY(), bbox.getMaxX(), bbox.getMaxY()))
        print '   # peaks:', len(f.getPeaks())
        for p in f.getPeaks():
            print '    Peak', p
    #print 'psf', psf
    #print 'sources', sources
    #print 'footprints', footprints
    #psf, apcorr, brightSources, matches, matchMeta = self.calibrate(exposure, defects=defects)
    #if self.config['do']['phot']:
    #    sources, footprints = self.phot(exposure, psf, apcorr, wcs=exposure.getWcs())
    #psf, wcs = self.fakePsf(exposure)
    #sources, footprints = self.phot(exposure, psf)
    #sources = self.rephot(exposure, footprints, psf, apcorr=apcorr)
    #model = calibrate['model']
    #fwhm = calibrate['fwhm'] / wcs.pixelScale()
    #size = calibrate['size']
    # psf = afwDet.createPsf(model, size, size, fwhm/(2*math.sqrt(2*math.log(2))))
    #print 'done!'
    print 'writing output...'
    io.write(dataId, psf=psf, sources=sources)
    print 'done!'
    print 'Writing bounding-boxes...'
    io.outButler.put(bb, 'bb', dataId)

    #print 'Writing footprints...'
    #io.outButler.put(fps, 'footprints', dataId)

    # serialize a python version of footprints & peaks
    pyfoots = footprintsToPython(fps)
    print 'Writing py footprints...'
    io.outButler.put(pyfoots, 'pyfoots', dataId)

    return bb
Esempio n. 22
0
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
"""
@brief Demonstrate the use of the logging framework from Python
"""

import lsst.pex.logging as log
from lsst.pex.logging import Rec, Prop, endr

if __name__ == "__main__":

    # In any script or function where you want to log messages, the first
    # thing you should do is create a Log from the default log.
    mylog = log.Log(log.getDefaultLog(), "myapp.myfunc")

    # A simple message can be sent with a single call.  The first argument is
    # the level of the message; choices are: DEBUG, INFO, WARN, FATAL.
    mylog.log(mylog.INFO, "this is a simple message")

    # If you want to send multiple messages and/or properties all in the
    # same message, you can use the shift operator.  Be sure to end the
    # message with "endr".
    Rec(mylog, mylog.WARN) << "ouch!" << Prop("NODE", 5) \
                           << "something bad happened!" << endr

    # Normally properties are not printed to the screen.  To see these, we'll
    # turn them on now.
    #
    # Outside the pipeline framework, the default logger is a ScreenLog.
    # NOTE(review): the example appears truncated here -- the comment above
    # announces code that does not follow in this snippet.
Esempio n. 23
0
    def readFits(fileName, hdu=0, flags=0):
        """Read a ds9 region file, returning a ObjectMaskCatalog object

        N.b. the hdu and flags arguments are accepted only to satisfy the
        butler's readFits calling convention; they are ignored here.

        This method is called "readFits" to fool the butler. The corresponding mapper entry looks like
        brightObjectMask: {
            template:      "deepCoadd/BrightObjectMasks/%(tract)d/BrightObjectMask-%(tract)d-%(patch)s-%(filter)s.reg"
            python:        "lsst.obs.subaru.objectMasks.ObjectMaskCatalog"
            persistable:   "PurePythonClass"
            storage:       "FitsCatalogStorage"
        }
        and this is the only way I know to get it to read a random file type, in this case a ds9 region file

        This method expects to find files named as BrightObjectMask-%(tract)d-%(patch)s-%(filter)s.reg
        The files should be structured as follows:

        # Description of catalogue as a comment
        # CATALOG: catalog-id-string
        # TRACT: 0
        # PATCH: 5,4
        # FILTER: HSC-I

        wcs; fk5

        circle(RA, DEC, RADIUS) # ID: 1

        The commented lines must be present, with the relevant fields such as tract patch and filter filled
        in. The coordinate system must be listed as above. Each patch is specified as a circle, with an RA,
        DEC, and Radius specified in decimal degrees. Only circles are supported as region definitions
        currently.

        Raises RuntimeError if no "wcs; fk5" line is found in the file.
        """

        log = pexLog.getDefaultLog().createChildLog("ObjectMaskCatalog")

        brightObjects = ObjectMaskCatalog()
        checkedWcsIsFk5 = False

        with open(fileName) as fd:
            for lineNo, line in enumerate(fd.readlines(), 1):
                line = line.rstrip()

                if re.search(r"^\s*#", line):
                    #
                    # Parse any line of the form "# key : value" and put them into the metadata.
                    #
                    # The metadata values must be defined as outlined in the above docstring
                    #
                    # The value of these three keys will be checked,
                    # so get them right!
                    #
                    mat = re.search(
                        r"^\s*#\s*([a-zA-Z][a-zA-Z0-9_]+)\s*:\s*(.*)", line)
                    if mat:
                        key, value = mat.group(1).lower(), mat.group(2)
                        if key == "tract":
                            value = int(value)

                        brightObjects.table.getMetadata().set(key, value)

                # Strip comment-only lines entirely and skip blank lines.
                line = re.sub(r"^\s*#.*", "", line)
                if not line:
                    continue

                if re.search(r"^\s*wcs\s*;\s*fk5\s*$", line, re.IGNORECASE):
                    checkedWcsIsFk5 = True
                    continue

                # This regular expression parses the regions file for each region to be masked,
                # with the format as specified in the above docstring.
                # NOTE(review): "\d+(?:\.\d*)" makes the decimal point mandatory, so
                # integer-valued coordinates (e.g. "12") will NOT match and fall
                # through to the "Unexpected line" warning -- confirm this is intended.
                # NOTE(review): the value groups nest the unit group, so e.g. "1.5d"
                # yields ra="1.5d" AND raUnit="d"; presumably convertToAngle strips
                # the unit suffix from the value -- verify against its implementation.
                mat = re.search(
                    r"^\s*circle(?:\s+|\s*\(\s*)"
                    "(\d+(?:\.\d*)([d]*))"
                    "(?:\s+|\s*,\s*)"
                    "([+-]?\d+(?:\.\d*)([d]*))"
                    "(?:\s+|\s*,\s*)"
                    "(\d+(?:\.\d*))([d'\"]*)"
                    "(?:\s*|\s*\)\s*)"
                    "\s*#\s*ID:\s*(\d+)"
                    "\s*$", line)
                if mat:
                    ra, raUnit, dec, decUnit, radius, radiusUnit, _id = mat.groups(
                    )

                    _id = int(_id)
                    ra = convertToAngle(ra, raUnit, "ra", fileName, lineNo)
                    dec = convertToAngle(dec, decUnit, "dec", fileName, lineNo)
                    radius = convertToAngle(radius, radiusUnit, "radius",
                                            fileName, lineNo)

                    rec = brightObjects.addNew()
                    # N.b. rec["coord"] = Coord is not supported, so we have to use the setter
                    rec["id"] = _id
                    rec.setCoord(afwCoord.Fk5Coord(ra, dec))
                    rec["radius"] = radius
                else:
                    log.warn("Unexpected line \"%s\" at %s:%d" %
                             (line, fileName, lineNo))

        if not checkedWcsIsFk5:
            raise RuntimeError("Expected to see a line specifying an fk5 wcs")

        # This makes the deep copy contiguous in memory so that a ColumnView can be exposed to Numpy
        brightObjects._catalog = brightObjects._catalog.copy(True)

        return brightObjects
Esempio n. 24
0
    def __init__(self, label, rerun, cameraInfo, qaDataUtils, **kwargs):
        """
        @param label       The name of this data set
        @param rerun       The rerun to retrieve
        @param cameraInfo  A cameraInfo object containing specs on the camera
        @param qaDataUtils Utilities object used to build the source catalog schema
        @param kwargs      Optional 'log' entry (defaults to the pex default log)
        """

        self.label = label
        self.rerun = rerun
        self.cameraInfo = cameraInfo
        self.dataInfo = self.cameraInfo.dataInfo

        self.log = kwargs.get('log', pexLog.getDefaultLog())

        # dataInfo holds (name, discriminator) pairs; keep them as parallel lists.
        self.dataIdNames = []
        self.dataIdDiscrim = []
        for name, discrim in self.dataInfo:
            self.dataIdNames.append(name)
            self.dataIdDiscrim.append(discrim)

        self.initCache()

        # Bookkeeping for nested load timing/progress reporting.
        self.loadDepth = 0
        self.lastPrint = None
        self.t0 = []

        self.brokenDataIdList = []

        # Different cameras name the per-chip dataId key differently.
        self.ccdConvention = {'lsstSim': 'sensor',
                              'sdss': 'camcol'}.get(self.cameraInfo.name, 'ccd')

        # Cache the reference-catalog schema keys we look up repeatedly.
        refKeys = pqaSource.RefCatalog().keyDict
        for attr, key in (('k_rPsf', 'PsfFlux'), ('k_rAp', 'ApFlux'),
                          ('k_rMod', 'ModelFlux'), ('k_rInst', 'InstFlux'),
                          ('k_rPsfE', 'PsfFluxErr'), ('k_rApE', 'ApFluxErr'),
                          ('k_rModE', 'ModelFluxErr'), ('k_rInstE', 'InstFluxErr'),
                          ('k_rRa', 'Ra'), ('k_rDec', 'Dec')):
            setattr(self, attr, refKeys[key])

        # ... and likewise for the measurement catalog.
        catKeys = pqaSource.Catalog(qaDataUtils=qaDataUtils).keyDict
        for attr, key in (('k_x', 'XAstrom'), ('k_y', 'YAstrom'),
                          ('k_Ra', 'Ra'), ('k_Dec', 'Dec'),
                          ('k_Psf', 'PsfFlux'), ('k_Ap', 'ApFlux'),
                          ('k_Mod', 'ModelFlux'), ('k_Inst', 'InstFlux'),
                          ('k_PsfE', 'PsfFluxErr'), ('k_ApE', 'ApFluxErr'),
                          ('k_ModE', 'ModelFluxErr'), ('k_InstE', 'InstFluxErr'),
                          ('k_ext', 'Extendedness'),
                          ('k_intc', 'FlagPixInterpCen'),
                          ('k_satc', 'FlagPixSaturCen'),
                          ('k_edg', 'FlagPixEdge'),
                          ('k_neg', 'FlagNegative'),
                          ('k_bad', 'FlagBadCentroid'),
                          ('k_ixx', 'Ixx'), ('k_iyy', 'Iyy'), ('k_ixy', 'Ixy'),
                          ('k_nchild', 'deblend_nchild')):
            setattr(self, attr, catKeys[key])
Esempio n. 25
0
 def __init__(self, root, log=None):
     """Record the repository root and a logger (the default log if none given)."""
     self.log = getDefaultLog() if log is None else log
     self.root = root
     # Butler is created lazily; start with no instance.
     self._butler = None
    def selectStars(self, exposure, catalog, matches=None):
        """Return a list of PSF candidates that represent likely stars

        A list of PSF candidates may be used by a PSF fitter to construct a PSF.

        Stars are identified by clustering sources in the magnitude-size plane
        (stellar locus has a narrow, magnitude-independent width).

        @param[in] exposure: the exposure containing the sources
        @param[in] catalog: a SourceCatalog containing sources that may be stars
        @param[in] matches: astrometric matches; ignored by this star selector

        @return psfCandidateList: a list of PSF candidates.

        @raise RuntimeError if no source survives the flux/width/flag cuts.

        N.b. this is Python 2 code (print statement, raw_input, cPickle).
        """
        import lsstDebug
        display = lsstDebug.Info(__name__).display
        displayExposure = lsstDebug.Info(
            __name__).displayExposure  # display the Exposure + spatialCells
        plotMagSize = lsstDebug.Info(
            __name__).plotMagSize  # display the magnitude-size relation
        dumpData = lsstDebug.Info(
            __name__).dumpData  # dump data to pickle file?

        # create a log for my application
        logger = pexLogging.Log(pexLogging.getDefaultLog(),
                                "meas.algorithms.objectSizeStarSelector")

        # If the detector provides a distortion model, moments will be
        # corrected to their undistorted values below.
        # NOTE(review): "not detector is None" -- "detector is not None" would
        # be the idiomatic spelling (behavior is identical).
        detector = exposure.getDetector()
        distorter = None
        xy0 = afwGeom.Point2D(0, 0)
        if not detector is None:
            cPix = detector.getCenterPixel()
            detSize = detector.getSize()
            # xy0 maps exposure pixel coords into detector coords (detector
            # center minus half its size in mm -- assumes 1 pixel per mm here;
            # TODO confirm units against Detector.getSize()).
            xy0.setX(cPix.getX() - int(0.5 * detSize.getMm()[0]))
            xy0.setY(cPix.getY() - int(0.5 * detSize.getMm()[1]))
            distorter = detector.getDistortion()
        #
        # Look at the distribution of stars in the magnitude-size plane
        #
        flux = catalog.get(self._sourceFluxField)

        # Gather (optionally undistorted) second moments for every source.
        xx = numpy.empty(len(catalog))
        xy = numpy.empty_like(xx)
        yy = numpy.empty_like(xx)
        for i, source in enumerate(catalog):
            Ixx, Ixy, Iyy = source.getIxx(), source.getIxy(), source.getIyy()
            if distorter:
                xpix, ypix = source.getX() + xy0.getX(), source.getY(
                ) + xy0.getY()
                p = afwGeom.Point2D(xpix, ypix)
                m = distorter.undistort(p, geomEllip.Quadrupole(Ixx, Iyy, Ixy),
                                        detector)
                Ixx, Ixy, Iyy = m.getIxx(), m.getIxy(), m.getIyy()

            xx[i], xy[i], yy[i] = Ixx, Ixy, Iyy

        # RMS width from the trace of the moments matrix.
        width = numpy.sqrt(xx + yy)

        # Build a boolean "bad" mask: any configured bad flag, out-of-range
        # flux, non-finite values, or width outside the allowed band.
        bad = reduce(lambda x, y: numpy.logical_or(x, catalog.get(y)),
                     self._badFlags, False)
        bad = numpy.logical_or(bad, flux < self._fluxMin)
        bad = numpy.logical_or(bad, numpy.logical_not(numpy.isfinite(width)))
        bad = numpy.logical_or(bad, numpy.logical_not(numpy.isfinite(flux)))
        bad = numpy.logical_or(bad, width < self._widthMin)
        bad = numpy.logical_or(bad, width > self._widthMax)
        if self._fluxMax > 0:
            bad = numpy.logical_or(bad, flux > self._fluxMax)
        good = numpy.logical_not(bad)

        if not numpy.any(good):
            raise RuntimeError(
                "No objects passed our cuts for consideration as psf stars")

        # Instrumental magnitude (arbitrary zero point) for the good sources.
        mag = -2.5 * numpy.log10(flux[good])
        width = width[good]
        #
        # Look for the maximum in the size histogram, then search upwards for the minimum that separates
        # the initial peak (of, we presume, stars) from the galaxies
        #
        if dumpData:
            # Debug aid: dump (mag, width) to the first free ~/widths-N.pkl.
            import os, cPickle as pickle
            _ii = 0
            while True:
                pickleFile = os.path.expanduser(
                    os.path.join("~", "widths-%d.pkl" % _ii))
                if not os.path.exists(pickleFile):
                    break
                _ii += 1

            with open(pickleFile, "wb") as fd:
                pickle.dump(mag, fd, -1)
                pickle.dump(width, fd, -1)

        # Cluster widths into 4 groups; cluster 0 is taken to be the stars.
        centers, clusterId = _kcenters(width, nCluster=4, useMedian=True)

        if display and plotMagSize and pyplot:
            fig = plot(mag,
                       width,
                       centers,
                       clusterId,
                       marker="+",
                       markersize=3,
                       markeredgewidth=None,
                       ltype=':',
                       clear=True)
        else:
            fig = None

        clusterId = _improveCluster(width, centers, clusterId)

        if display and plotMagSize and pyplot:
            plot(mag,
                 width,
                 centers,
                 clusterId,
                 marker="x",
                 markersize=3,
                 markeredgewidth=None)

        stellar = (clusterId == 0)
        #
        # We know enough to plot, if so requested
        #
        frame = 0

        if fig:
            if display and displayExposure:
                ds9.mtv(exposure.getMaskedImage(),
                        frame=frame,
                        title="PSF candidates")

                # Module-level handler so matplotlib callbacks survive this call.
                global eventHandler
                eventHandler = EventHandler(fig.get_axes()[0],
                                            mag,
                                            width,
                                            catalog.getX()[good],
                                            catalog.getY()[good],
                                            frames=[frame])

            fig.show()

            #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

            # Interactive review loop: block until the user dismisses the plot.
            while True:
                try:
                    reply = raw_input(
                        "continue? [c h(elp) q(uit) p(db)] ").strip()
                except EOFError:
                    reply = "y"

                if reply:
                    if reply[0] == "h":
                        print """\
    We cluster the points; red are the stellar candidates and the other colours are other clusters.
    Points labelled + are rejects from the cluster (only for cluster 0).

    At this prompt, you can continue with almost any key; 'p' enters pdb, and 'h' prints this text

    If displayExposure is true, you can put the cursor on a point and hit 'p' to see it in ds9.
    """
                    elif reply[0] == "p":
                        import pdb
                        pdb.set_trace()
                    elif reply[0] == 'q':
                        sys.exit(1)
                    else:
                        break

        if display and displayExposure:
            mi = exposure.getMaskedImage()

            with ds9.Buffering():
                for i, source in enumerate(catalog):
                    if good[i]:
                        ctype = ds9.GREEN  # star candidate
                    else:
                        ctype = ds9.RED  # not star

                    ds9.dot("+",
                            source.getX() - mi.getX0(),
                            source.getY() - mi.getY0(),
                            frame=frame,
                            ctype=ctype)
        #
        # Time to use that stellar classification to generate psfCandidateList
        #
        with ds9.Buffering():
            psfCandidateList = []
            # Walk the stellar flags in lockstep with the good sources only.
            for isStellar, source in zip(
                    stellar, [s for g, s in zip(good, catalog) if g]):
                if not isStellar:
                    continue

                try:
                    psfCandidate = algorithmsLib.makePsfCandidate(
                        source, exposure)
                    # The setXXX methods are class static, but it's convenient to call them on
                    # an instance as we don't know Exposure's pixel type
                    # (and hence psfCandidate's exact type)
                    if psfCandidate.getWidth() == 0:
                        psfCandidate.setBorderWidth(self._borderWidth)
                        psfCandidate.setWidth(self._kernelSize +
                                              2 * self._borderWidth)
                        psfCandidate.setHeight(self._kernelSize +
                                               2 * self._borderWidth)

                    # Reject candidates whose postage stamp contains non-finite pixels.
                    im = psfCandidate.getMaskedImage().getImage()
                    vmax = afwMath.makeStatistics(im, afwMath.MAX).getValue()
                    if not numpy.isfinite(vmax):
                        continue
                    psfCandidateList.append(psfCandidate)

                    if display and displayExposure:
                        ds9.dot("o",
                                source.getX() - mi.getX0(),
                                source.getY() - mi.getY0(),
                                size=4,
                                frame=frame,
                                ctype=ds9.CYAN)
                except Exception as err:
                    # Best-effort: a failed candidate is logged and skipped,
                    # not fatal to the whole selection.
                    logger.log(
                        pexLogging.Log.INFO,
                        "Failed to make a psfCandidate from source %d: %s" %
                        (source.getId(), err))

        return psfCandidateList
Esempio n. 27
0
    def readFits(fileName, hdu=0, flags=0):
        """Read a ds9 region file, returning a ObjectMaskCatalog object

        N.b. hdu and flags exist only to match the butler's readFits calling
        convention; both are ignored.

        This method is called "readFits" to fool the butler. The corresponding mapper entry looks like
        brightObjectMask: {
            template:      "deepCoadd/BrightObjectMasks/%(tract)d/BrightObjectMask-%(tract)d-%(patch)s-%(filter)s.reg"
            python:        "lsst.obs.subaru.objectMasks.ObjectMaskCatalog"
            persistable:   "PurePythonClass"
            storage:       "FitsCatalogStorage"
        }
        and this is the only way I know to get it to read a random file type, in this case a ds9 region file

        This method expects to find files named as BrightObjectMask-%(tract)d-%(patch)s-%(filter)s.reg
        The files should be structured as follows:

        # Description of catalogue as a comment
        # CATALOG: catalog-id-string
        # TRACT: 0
        # PATCH: 5,4
        # FILTER: HSC-I

        wcs; fk5

        circle(RA, DEC, RADIUS) # ID: 1

        The commented lines must be present, with the relevant fields such as tract patch and filter filled
        in. The coordinate system must be listed as above. Each patch is specified as a circle, with an RA,
        DEC, and Radius specified in decimal degrees. Only circles are supported as region definitions
        currently.

        Raises RuntimeError if no "wcs; fk5" line is present in the file.
        """

        log = pexLog.getDefaultLog().createChildLog("ObjectMaskCatalog")

        brightObjects = ObjectMaskCatalog()
        checkedWcsIsFk5 = False

        with open(fileName) as fd:
            for lineNo, line in enumerate(fd.readlines(), 1):
                line = line.rstrip()

                if re.search(r"^\s*#", line):
                    #
                    # Parse any line of the form "# key : value" and put them into the metadata.
                    #
                    # The metadata values must be defined as outlined in the above docstring
                    #
                    # The value of these three keys will be checked,
                    # so get them right!
                    #
                    mat = re.search(r"^\s*#\s*([a-zA-Z][a-zA-Z0-9_]+)\s*:\s*(.*)", line)
                    if mat:
                        key, value = mat.group(1).lower(), mat.group(2)
                        if key == "tract":
                            value = int(value)

                        brightObjects.table.getMetadata().set(key, value)

                # Drop comment-only lines and skip anything now empty.
                line = re.sub(r"^\s*#.*", "", line)
                if not line:
                    continue

                if re.search(r"^\s*wcs\s*;\s*fk5\s*$", line, re.IGNORECASE):
                    checkedWcsIsFk5 = True
                    continue

                # This regular expression parses the regions file for each region to be masked,
                # with the format as specified in the above docstring.
                # NOTE(review): "\d+(?:\.\d*)" requires a literal decimal point, so
                # integer coordinates (e.g. "12") do not match and are reported as
                # unexpected lines -- confirm this restriction is intended.
                # NOTE(review): the outer value groups contain the unit groups, so
                # ra/dec/radius strings still carry any "d" suffix; presumably
                # convertToAngle copes with that -- verify.
                mat = re.search(r"^\s*circle(?:\s+|\s*\(\s*)"
                                "(\d+(?:\.\d*)([d]*))" "(?:\s+|\s*,\s*)"
                                "([+-]?\d+(?:\.\d*)([d]*))" "(?:\s+|\s*,\s*)"
                                "(\d+(?:\.\d*))([d'\"]*)" "(?:\s*|\s*\)\s*)"
                                "\s*#\s*ID:\s*(\d+)" "\s*$"
                                , line)
                if mat:
                    ra, raUnit, dec, decUnit, radius, radiusUnit, _id = mat.groups()

                    _id = int(_id)
                    ra =     convertToAngle(ra, raUnit, "ra", fileName, lineNo)
                    dec =    convertToAngle(dec, decUnit, "dec", fileName, lineNo)
                    radius = convertToAngle(radius, radiusUnit, "radius", fileName, lineNo)

                    rec = brightObjects.addNew()
                    # N.b. rec["coord"] = Coord is not supported, so we have to use the setter
                    rec["id"] = _id
                    rec.setCoord(afwCoord.Fk5Coord(ra, dec))
                    rec["radius"] = radius
                else:
                    log.warn("Unexpected line \"%s\" at %s:%d" % (line, fileName, lineNo))

        if not checkedWcsIsFk5:
            raise RuntimeError("Expected to see a line specifying an fk5 wcs")

        # This makes the deep copy contiguous in memory so that a ColumnView can be exposed to Numpy
        brightObjects._catalog = brightObjects._catalog.copy(True)

        return brightObjects
Esempio n. 28
0
# the GNU General Public License along with this program.  If not, 
# see <http://www.lsstcorp.org/LegalNotices/>.
#

"""
@brief Demonstrate the use of the logging framework from Python
"""

import lsst.pex.logging as log
from lsst.pex.logging import Rec, Prop, endr

if __name__ == "__main__":

    # In any script or function where you want to log messages, the first
    # thing you should do is create a Log from the default log.
    mylog = log.Log(log.getDefaultLog(), "myapp.myfunc")

    # A simple message can be sent with a single call.  The first argument is
    # the level of the message; choices are: DEBUG, INFO, WARN, FATAL.
    # (Stray trailing semicolon removed -- it was a no-op and non-idiomatic.)
    mylog.log(mylog.INFO, "this is a simple message")

    # If you want to send multiple messages and/or properties all in the
    # same message, you can use the shift operator.  Be sure to end the
    # message with "endr".
    Rec(mylog, mylog.WARN) << "ouch!" << Prop("NODE", 5) \
                           << "something bad happened!" << endr

    # Normally properties are not printed to the screen.  To see these, we'll
    # turn them on now.
    #
    # Outside the pipeline framework, the default logger is a ScreenLog.
def getBackground(image, backgroundConfig, nx=0, ny=0, algorithm=None):
    """!Estimate the background of an image (a thin layer on lsst.afw.math.makeBackground)

    \param[in] image  image whose background is to be computed
    \param[in] backgroundConfig  configuration (a BackgroundConfig)
    \param[in] nx  number of x bands; 0 for default
    \param[in] ny  number of y bands; 0 for default
    \param[in] algorithm  name of interpolation algorithm; see lsst.afw.math.BackgroundControl for details
    \return the Background returned by lsst.afw.math.makeBackground
    \throw ValueError if the approximation configuration is inconsistent with the grid
    """
    backgroundConfig.validate()  # stray trailing semicolon removed

    logger = pexLogging.Log(pexLogging.getDefaultLog(),
                            "lsst.meas.algorithms.detection.getBackground")

    # Default grid: one sample per binSize pixels (plus one) in each direction.
    if not nx:
        nx = image.getWidth()//backgroundConfig.binSize + 1
    if not ny:
        ny = image.getHeight()//backgroundConfig.binSize + 1

    # Optionally show the image and the background bin boundaries in ds9.
    displayBackground = lsstDebug.Info(__name__).displayBackground
    if displayBackground:
        import itertools
        ds9.mtv(image, frame=1)
        xPosts = numpy.rint(numpy.linspace(0, image.getWidth() + 1, num=nx, endpoint=True))
        yPosts = numpy.rint(numpy.linspace(0, image.getHeight() + 1, num=ny, endpoint=True))
        with ds9.Buffering():
            for (xMin, xMax), (yMin, yMax) in itertools.product(zip(xPosts[:-1], xPosts[1:]),
                                                                zip(yPosts[:-1], yPosts[1:])):
                ds9.line([(xMin, yMin), (xMin, yMax), (xMax, yMax), (xMax, yMin), (xMin, yMin)], frame=1)

    # Exclude pixels with any of the configured mask planes set from the statistics.
    sctrl = afwMath.StatisticsControl()
    sctrl.setAndMask(reduce(lambda x, y: x | image.getMask().getPlaneBitMask(y),
                            backgroundConfig.ignoredPixelMask, 0x0))
    sctrl.setNanSafe(backgroundConfig.isNanSafe)

    pl = pexLogging.Debug("lsst.meas.algorithms.detection.getBackground")
    pl.debug(3, "Ignoring mask planes: %s" % ", ".join(backgroundConfig.ignoredPixelMask))

    if not algorithm:
        algorithm = backgroundConfig.algorithm

    bctrl = afwMath.BackgroundControl(algorithm, nx, ny,
                                      backgroundConfig.undersampleStyle, sctrl,
                                      backgroundConfig.statisticsProperty)

    # TODO: The following check should really be done within afw/math.  With the
    #       current code structure, it would need to be accounted for in the
    #       doGetImage() function in BackgroundMI.cc (which currently only checks
    #       against the interpolation settings which is not appropriate when
    #       useApprox=True) and/or the makeApproximate() function in
    #       afw/Approximate.cc.
    #       See ticket DM-2920: "Clean up code in afw for Approximate background
    #       estimation" (which includes a note to remove the following and the
    #       similar checks in pipe_tasks/matchBackgrounds.py once implemented)
    #
    # Check that config setting of approxOrder/binSize make sense
    # (i.e. ngrid (= shortDimension/binSize) > approxOrderX) and perform
    # appropriate undersampleStyle behavior.
    if backgroundConfig.useApprox:
        # Only a square Chebyshev fit is supported: approxOrderY must equal
        # approxOrderX or be -1 (meaning "use approxOrderX").
        if backgroundConfig.approxOrderY not in (backgroundConfig.approxOrderX, -1):
            raise ValueError("Error: approxOrderY not in (approxOrderX, -1)")
        order = backgroundConfig.approxOrderX
        minNumberGridPoints = backgroundConfig.approxOrderX + 1
        if min(nx, ny) <= backgroundConfig.approxOrderX:
            # (Unbalanced ")" removed from the message below.)
            logger.warn("Too few points in grid to constrain fit: min(nx, ny) < approxOrder "+
                        "[min(%d, %d) < %d]" % (nx, ny, backgroundConfig.approxOrderX))
            if backgroundConfig.undersampleStyle == "THROW_EXCEPTION":
                raise ValueError("Too few points in grid (%d, %d) for order (%d) and binsize (%d)" % (
                        nx, ny, backgroundConfig.approxOrderX, backgroundConfig.binSize))
            elif backgroundConfig.undersampleStyle == "REDUCE_INTERP_ORDER":
                # NOTE(review): this guards on the *configured* order before
                # reducing it -- confirm that is the intended check.
                if order < 1:
                    raise ValueError("Cannot reduce approxOrder below 0.  " +
                                     "Try using undersampleStyle = \"INCREASE_NXNYSAMPLE\" instead?")
                order = min(nx, ny) - 1
                logger.warn("Reducing approxOrder to %d" % order)
            elif backgroundConfig.undersampleStyle == "INCREASE_NXNYSAMPLE":
                # Shrink the bin size until the grid has enough samples for the fit.
                newBinSize = min(image.getWidth(), image.getHeight())//(minNumberGridPoints-1)
                if newBinSize < 1:
                    raise ValueError("Binsize must be greater than 0")
                newNx = image.getWidth()//newBinSize + 1
                newNy = image.getHeight()//newBinSize + 1
                bctrl.setNxSample(newNx)
                bctrl.setNySample(newNy)
                logger.warn("Decreasing binSize from %d to %d for a grid of (%d, %d)" %
                            (backgroundConfig.binSize, newBinSize, newNx, newNy))

        actrl = afwMath.ApproximateControl(afwMath.ApproximateControl.CHEBYSHEV, order, order,
                                           backgroundConfig.weighting)
        bctrl.setApproximateControl(actrl)

    return afwMath.makeBackground(image, bctrl)
Esempio n. 30
0
def plotPixelResiduals(exposure, warpedTemplateExposure, diffExposure, kernelCellSet,
                       kernel, background, testSources, config,
                       origVariance = False, nptsFull = 1e6, keepPlots = True, titleFs=14):
    """Plot diffim residuals for LOCAL and SPATIAL models

    Residuals are gathered from three populations: the kernel-fit candidates
    (local and spatially-interpolated models), the full difference image, and
    footprints of sources that were not used in the fit.

    @param origVariance  if True, normalize by the original exposure's variance
                         rather than the difference image's
    @param nptsFull      approximate number of full-image pixels to sample
    N.b. Python 2 code (except ..., e / print statement); the snippet returns
    early (without plotting) if pylab cannot be imported.
    """
    candidateResids = []
    spatialResids   = []
    nonfitResids    = []

    # Collect per-candidate residuals for both the local (ORIG) and the
    # spatially-evaluated kernel models.
    for cell in kernelCellSet.getCellList():
        for cand in cell.begin(True): # only look at good ones
            # Be sure
            if not (cand.getStatus() == afwMath.SpatialCellCandidate.GOOD):
                continue

            cand    = diffimLib.cast_KernelCandidateF(cand)
            diffim  = cand.getDifferenceImage(diffimLib.KernelCandidateF.ORIG)
            orig    = cand.getScienceMaskedImage()

            # Evaluate the spatial kernel/background model at this candidate's center.
            ski     = afwImage.ImageD(kernel.getDimensions())
            kernel.computeImage(ski, False, int(cand.getXCenter()), int(cand.getYCenter()))
            sk      = afwMath.FixedKernel(ski)
            sbg     = background(int(cand.getXCenter()), int(cand.getYCenter()))
            sdiffim = cand.getDifferenceImage(sk, sbg)

            # trim edges due to convolution
            bbox    = kernel.shrinkBBox(diffim.getBBox())
            tdiffim  = diffim.Factory(diffim, bbox)
            torig    = orig.Factory(orig, bbox)
            tsdiffim = sdiffim.Factory(sdiffim, bbox)

            if origVariance:
                candidateResids.append(np.ravel(tdiffim.getImage().getArray() /
                                                np.sqrt(torig.getVariance().getArray())))
                spatialResids.append(np.ravel(tsdiffim.getImage().getArray() /
                                              np.sqrt(torig.getVariance().getArray())))
            else:
                candidateResids.append(np.ravel(tdiffim.getImage().getArray() /
                                                np.sqrt(tdiffim.getVariance().getArray())))
                spatialResids.append(np.ravel(tsdiffim.getImage().getArray() /
                                              np.sqrt(tsdiffim.getVariance().getArray())))

    # Whole-image residuals, excluding NO_DATA and SAT pixels.
    fullIm   = diffExposure.getMaskedImage().getImage().getArray()
    fullMask = diffExposure.getMaskedImage().getMask().getArray()
    if origVariance:
        fullVar  = exposure.getMaskedImage().getVariance().getArray()
    else:
        fullVar  = diffExposure.getMaskedImage().getVariance().getArray()

    bitmaskBad  = 0
    bitmaskBad |= afwImage.MaskU.getPlaneBitMask('NO_DATA')
    bitmaskBad |= afwImage.MaskU.getPlaneBitMask('SAT')
    idx = np.where((fullMask & bitmaskBad) == 0)
    # Subsample down to roughly nptsFull pixels.
    # NOTE(review): if there are fewer than nptsFull unmasked pixels, stride
    # is 0 and the [::stride] slices below raise ValueError -- confirm inputs
    # always have enough unmasked pixels.
    stride = int(len(idx[0]) // nptsFull)
    sidx = idx[0][::stride], idx[1][::stride]
    allResids = fullIm[sidx] / np.sqrt(fullVar[sidx])

    # Residuals inside footprints of sources NOT used in the kernel fit.
    testFootprints = diffimTools.sourceToFootprintList(testSources, warpedTemplateExposure, 
                                                       exposure, config, pexLog.getDefaultLog())
    for fp in testFootprints:
        subexp = diffExposure.Factory(diffExposure, fp["footprint"].getBBox())
        subim  = subexp.getMaskedImage().getImage()
        if origVariance:
            subvar = afwImage.ExposureF(exposure, fp["footprint"].getBBox()).getMaskedImage().getVariance()
        else:
            subvar = subexp.getMaskedImage().getVariance()
        nonfitResids.append(np.ravel(subim.getArray() / np.sqrt(subvar.getArray())))

    candidateResids = np.ravel(np.array(candidateResids))
    spatialResids   = np.ravel(np.array(spatialResids))
    nonfitResids    = np.ravel(np.array(nonfitResids))

    try:
        import pylab
        from matplotlib.font_manager import FontProperties
    except ImportError, e:
        print "Unable to import pylab: %s" % e
        return
Esempio n. 31
0
def jobLog(job):
    """Attach a per-job log file destination to the default log.

    The destination is named "<job>.<hostname>.<pid>", where hostname is the
    unqualified machine name.  A job of None (or the literal string "None")
    means "no job-specific logging" and is a no-op.

    @param job: job identifier string, or None/"None" to disable
    """
    if job is None or job == "None":
        return
    # Unqualified host name, e.g. "node12" from "node12.example.org".
    hostname = os.uname()[1].split(".")[0]
    destination = job + ".%s.%d" % (hostname, os.getpid())
    pexLog.getDefaultLog().addDestination(destination)
 def __init__(self, config):
     """Store the PSF-determiner configuration and create its loggers.

     @param config: configuration object for this PSF determiner
     """
     self.config = config
     # Debug channel for verbose tracing of the PSF determination.
     self.debugLog = pexLog.Debug("meas.algorithms.psfDeterminer")
     # Warning/info log, attached as a child of the default log.
     self.warnLog = pexLog.Log(pexLog.getDefaultLog(), "meas.algorithms.psfDeterminer")
 def __init__(self, config):
     """Store the PSF-determiner configuration and create its loggers.

     @param config: configuration object for this PSF determiner
     """
     self.config = config
     # Debug channel for verbose tracing of the PSF determination.
     self.debugLog = pexLog.Debug("meas.algorithms.psfDeterminer")
     # Warning/info log, attached as a child of the default log.
     self.warnLog = pexLog.Log(pexLog.getDefaultLog(),
                               "meas.algorithms.psfDeterminer")
    def selectStars(self, exposure, catalog, matches=None):
        """Return a list of PSF candidates that represent likely stars
        
        A list of PSF candidates may be used by a PSF fitter to construct a PSF.
        
        @param[in] exposure: the exposure containing the sources
        @param[in] catalog: a SourceCatalog containing sources that may be stars
        @param[in] matches: astrometric matches; ignored by this star selector
        
        @return psfCandidateList: a list of PSF candidates.
        """
        import lsstDebug
        display = lsstDebug.Info(__name__).display
        displayExposure = lsstDebug.Info(__name__).displayExposure     # display the Exposure + spatialCells
        plotMagSize = lsstDebug.Info(__name__).plotMagSize             # display the magnitude-size relation
        dumpData = lsstDebug.Info(__name__).dumpData                   # dump data to pickle file?

        # create a log for my application
        logger = pexLogging.Log(pexLogging.getDefaultLog(), "meas.algorithms.objectSizeStarSelector")

        detector = exposure.getDetector()
        distorter = None
        xy0 = afwGeom.Point2D(0,0)
        # If the exposure knows its detector, compute the offset from the chip
        # corner to its centre and fetch the distortion model; both are used
        # below to undistort each source's second moments.
        if not detector is None:
            cPix = detector.getCenterPixel()
            detSize = detector.getSize()
            xy0.setX(cPix.getX() - int(0.5*detSize.getMm()[0]))
            xy0.setY(cPix.getY() - int(0.5*detSize.getMm()[1]))
            distorter = detector.getDistortion()
        #
        # Look at the distribution of stars in the magnitude-size plane
        #
        flux = catalog.get(self._sourceFluxField)

        # Per-source second moments, undistorted when a distortion model exists.
        xx = numpy.empty(len(catalog))
        xy = numpy.empty_like(xx)
        yy = numpy.empty_like(xx)
        for i, source in enumerate(catalog):
            Ixx, Ixy, Iyy = source.getIxx(), source.getIxy(), source.getIyy()
            if distorter:
                xpix, ypix = source.getX() + xy0.getX(), source.getY() + xy0.getY()
                p = afwGeom.Point2D(xpix, ypix)
                m = distorter.undistort(p, geomEllip.Quadrupole(Ixx, Iyy, Ixy), detector)
                Ixx, Ixy, Iyy = m.getIxx(), m.getIxy(), m.getIyy()

            xx[i], xy[i], yy[i] = Ixx, Ixy, Iyy

        # Trace radius sqrt(0.5*(Ixx + Iyy)): the size proxy that is clustered below.
        width = numpy.sqrt(0.5*(xx + yy))

        # OR together every rejection criterion: any configured bad flag,
        # too-faint or non-finite flux, non-finite or out-of-range width,
        # and (when a positive maximum is configured) too-bright flux.
        bad = reduce(lambda x, y: numpy.logical_or(x, catalog.get(y)), self._badFlags, False)
        bad = numpy.logical_or(bad, flux < self._fluxMin)
        bad = numpy.logical_or(bad, numpy.logical_not(numpy.isfinite(width)))
        bad = numpy.logical_or(bad, numpy.logical_not(numpy.isfinite(flux)))
        bad = numpy.logical_or(bad, width < self._widthMin)
        bad = numpy.logical_or(bad, width > self._widthMax)
        if self._fluxMax > 0:
            bad = numpy.logical_or(bad, flux > self._fluxMax)
        good = numpy.logical_not(bad)

        if not numpy.any(good):
            raise RuntimeError("No objects passed our cuts for consideration as psf stars")

        # Instrumental magnitudes and widths of the surviving sources only.
        mag = -2.5*numpy.log10(flux[good])
        width = width[good]
        #
        # Look for the maximum in the size histogram, then search upwards for the minimum that separates
        # the initial peak (of, we presume, stars) from the galaxies
        #
        if dumpData:
            # Debug aid: dump (mag, width) into the first free ~/widths-N.pkl.
            import os, cPickle as pickle
            _ii = 0
            while True:
                pickleFile = os.path.expanduser(os.path.join("~", "widths-%d.pkl" % _ii))
                if not os.path.exists(pickleFile):
                    break
                _ii += 1

            with open(pickleFile, "wb") as fd:
                pickle.dump(mag, fd, -1)
                pickle.dump(width, fd, -1)

        # Cluster the width distribution into 4 groups (helper defined
        # elsewhere in this module); cluster 0 is taken to be the stellar locus.
        centers, clusterId = _kcenters(width, nCluster=4, useMedian=True,
                                       widthStdAllowed=self._widthStdAllowed)

        if display and plotMagSize and pyplot:
            fig = plot(mag, width, centers, clusterId, magType=self._sourceFluxField.split(".")[-1].title(),
                       marker="+", markersize=3, markeredgewidth=None, ltype=':', clear=True)
        else:
            fig = None

        # Refine the cluster assignments with sigma clipping.
        clusterId = _improveCluster(width, centers, clusterId,
                                    nsigma = self._nSigmaClip, widthStdAllowed=self._widthStdAllowed)

        if display and plotMagSize and pyplot:
            plot(mag, width, centers, clusterId, marker="x", markersize=3, markeredgewidth=None, clear=False)

        stellar = (clusterId == 0)
        #
        # We know enough to plot, if so requested
        #
        frame = 0

        if fig:
            if display and displayExposure:
                ds9.mtv(exposure.getMaskedImage(), frame=frame, title="PSF candidates")

                global eventHandler
                eventHandler = EventHandler(fig.get_axes()[0], mag, width,
                                            catalog.getX()[good], catalog.getY()[good], frames=[frame])

            fig.show()

            #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

            # Interactive prompt so the user can inspect the clustering before
            # continuing (Python 2: raw_input/print-statement).
            while True:
                try:
                    reply = raw_input("continue? [c h(elp) q(uit) p(db)] ").strip()
                except EOFError:
                    reply = None
                if not reply:
                    reply = "c"

                if reply:
                    if reply[0] == "h":
                        print """\
    We cluster the points; red are the stellar candidates and the other colours are other clusters.
    Points labelled + are rejects from the cluster (only for cluster 0).

    At this prompt, you can continue with almost any key; 'p' enters pdb, and 'h' prints this text

    If displayExposure is true, you can put the cursor on a point and hit 'p' to see it in ds9.
    """
                    elif reply[0] == "p":
                        import pdb; pdb.set_trace()
                    elif reply[0] == 'q':
                        sys.exit(1)
                    else:
                        break

        if display and displayExposure:
            # Mark accepted (green) and rejected (red) sources in ds9.
            mi = exposure.getMaskedImage()

            with ds9.Buffering():
                for i, source in enumerate(catalog):
                    if good[i]:
                        ctype = ds9.GREEN # star candidate
                    else:
                        ctype = ds9.RED # not star

                    ds9.dot("+", source.getX() - mi.getX0(),
                            source.getY() - mi.getY0(), frame=frame, ctype=ctype)
        #
        # Time to use that stellar classification to generate psfCandidateList
        #
        with ds9.Buffering():
            psfCandidateList = []
            # Walk only the sources that passed the cuts, keeping stellar ones.
            for isStellar, source in zip(stellar, [s for g, s in zip(good, catalog) if g]):
                if not isStellar:
                    continue

                try:
                    psfCandidate = algorithmsLib.makePsfCandidate(source, exposure)
                    # The setXXX methods are class static, but it's convenient to call them on
                    # an instance as we don't know Exposure's pixel type
                    # (and hence psfCandidate's exact type)
                    if psfCandidate.getWidth() == 0:
                        psfCandidate.setBorderWidth(self._borderWidth)
                        psfCandidate.setWidth(self._kernelSize + 2*self._borderWidth)
                        psfCandidate.setHeight(self._kernelSize + 2*self._borderWidth)

                    im = psfCandidate.getMaskedImage().getImage()
                    # Reject candidates whose brightest pixel is not finite.
                    vmax = afwMath.makeStatistics(im, afwMath.MAX).getValue()
                    if not numpy.isfinite(vmax):
                        continue
                    psfCandidateList.append(psfCandidate)

                    if display and displayExposure:
                        ds9.dot("o", source.getX() - mi.getX0(), source.getY() - mi.getY0(),
                                size=4, frame=frame, ctype=ds9.CYAN)
                except Exception as err:
                    logger.logdebug("Failed to make a psfCandidate from source %d: %s" % (source.getId(), err))

        return psfCandidateList
Esempio n. 35
0
    def testAlgorithms(self):
        """Check that every registered measurement algorithm can be run.

        A single synthetic Gaussian star is placed in a small exposure and
        measured with all registry algorithms (minus the centroider, which the
        task runs separately, and the peak-likelihood flux); afterwards no
        algorithm may have raised its failure flag.
        """
        config = measAlg.SourceMeasurementConfig()
        config.algorithms.names = measAlg.AlgorithmRegistry.all.keys()
        # The centroider is applied by the task itself, not as a plugin.
        config.algorithms.names.discard(config.centroider.name)
        config.algorithms.names.discard("flux.peakLikelihood")
        config.doReplaceWithNoise = False

        if False:  # flip on for verbose logging when debugging this test
            log = pexLog.getDefaultLog()
            log.setThreshold(log.DEBUG)

        schema = afwTable.SourceTable.makeMinimalSchema()
        task = measAlg.SourceMeasurementTask(schema, config=config)
        catalog = afwTable.SourceCatalog(schema)
        record = catalog.addNew()
        record.set("id", 12345)

        # Build a small masked image containing one Gaussian star, offset so
        # the image origin is far from (0, 0).
        size = 256
        xStar, yStar = 65.432, 76.543
        width = 3.21
        x0, y0 = 12345, 54321
        x, y = numpy.indices((size, size))
        mimg = afwImage.MaskedImageF(afwGeom.ExtentI(size, size))
        mimg.setXY0(afwGeom.Point2I(x0, y0))
        mimg.getVariance().set(1.0)
        pixels = mimg.getImage().getArray()
        pixels[y, x] = numpy.exp(-0.5 * ((x - xStar)**2 + (y - yStar)**2) / width**2)

        # Wrap it in an Exposure with a PSF and a trivial WCS.
        psf = testLib.makeTestPsf(mimg)
        exp = afwImage.makeExposure(mimg)
        exp.setPsf(psf)
        exp.setXY0(afwGeom.Point2I(x0, y0))
        scale = 1.0e-5
        exp.setWcs(afwImage.makeWcs(
            afwCoord.Coord(0.0 * afwGeom.degrees, 0.0 * afwGeom.degrees),
            afwGeom.Point2D(0.0, 0.0), scale, 0.0, 0.0, scale))

        # Footprint covering the star, with its peak recorded and masked.
        peak = afwGeom.Point2I(int(xStar + x0), int(yStar + y0))
        bbox = mimg.getBBox()
        bbox.shift(afwGeom.Extent2I(x0, y0))
        foot = afwDetection.Footprint(peak, width, bbox)
        foot.addPeak(peak.getX(), peak.getY(), 1.0)
        afwDetection.setMaskFromFootprint(exp.getMaskedImage().getMask(), foot, 1)
        record.setFootprint(foot)

        if display:
            ds9.mtv(exp, frame=1)

        task.run(exp, catalog)

        # No algorithm may have flagged a failure on this well-behaved star.
        for alg in config.algorithms:
            flagName = alg + ".flags"
            if False:  # dump the measured values instead of asserting
                print(alg,
                      record.get(flagName) if flagName in schema else None,
                      record.get(alg) if alg in schema else None)
            elif flagName in schema:
                self.assertFalse(record.get(flagName))
    def selectStars(self, exposure, catalog, matches=None):
        """Return a list of PSF candidates that represent likely stars
        
        A list of PSF candidates may be used by a PSF fitter to construct a PSF.
        
        @param[in] exposure: the exposure containing the sources
        @param[in] catalog: a SourceCatalog containing sources that may be stars
        @param[in] matches: astrometric matches; ignored by this star selector
        
        @return psfCandidateList: a list of PSF candidates.
        """
        import lsstDebug
        display = lsstDebug.Info(__name__).display

        displayExposure = display and \
            lsstDebug.Info(__name__).displayExposure # display the Exposure + spatialCells
        plotFwhmHistogram = display and plt and \
            lsstDebug.Info(__name__).plotFwhmHistogram # Plot histogram of FWHM
        plotFlags = display and plt and \
            lsstDebug.Info(__name__).plotFlags # Plot the sources coloured by their flags
        plotRejection = display and plt and \
            lsstDebug.Info(__name__).plotRejection # Plot why sources are rejected
        # create a log for my application
        logger = pexLogging.Log(pexLogging.getDefaultLog(), "meas.extensions.psfex.psfexStarSelector")

        #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
        #
        # Local aliases for the configured selection thresholds.
        fluxName = self.config.fluxName
        fluxErrName = self.config.fluxErrName
        minFwhm = self.config.minFwhm
        maxFwhm = self.config.maxFwhm
        maxFwhmVariability = self.config.maxFwhmVariability
        maxbad = self.config.maxbad
        maxbadflag = self.config.maxbadflag
        maxellip = self.config.maxellip
        minsn = self.config.minsn

        # Elongation limit implied by the ellipticity limit, (1+e)/(1-e);
        # capped at 100 since the expression diverges as maxellip -> 1.
        maxelong = (maxellip + 1.0)/(1.0 - maxellip) if maxellip < 1.0 else 100

        # Unpack the catalogue
        shape = catalog.getShapeDefinition()
        ixx = catalog.get("%s.xx" % shape)
        iyy = catalog.get("%s.yy" % shape)

        # sqrt(0.5*(ixx + iyy)) is the trace radius; 2*sqrt(2*ln 2) converts a
        # Gaussian sigma to a FWHM.
        fwhm = 2*np.sqrt(2*np.log(2))*np.sqrt(0.5*(ixx + iyy))
        elong = 0.5*(ixx - iyy)/(ixx + iyy)

        # Flux S/N; sources with non-positive flux errors are forced to -BIG
        # so they cannot pass the minsn cut.
        flux = catalog.get(fluxName)
        fluxErr = catalog.get(fluxErrName)
        sn = flux/np.where(fluxErr > 0, fluxErr, 1)
        sn[fluxErr <= 0] = -psfex.psfex.cvar.BIG

        # Pack the configured bad flags into a per-source bitmask
        # (bit i corresponds to badFlags[i]).
        flags = 0x0
        for i, f in enumerate(self.config.badFlags):
            flags = np.bitwise_or(flags, np.where(catalog.get(f), 1 << i, 0))
        #
        # Estimate the acceptable range of source widths
        #
        good = np.logical_and(sn > minsn, np.logical_not(flags))
        good = np.logical_and(good, elong < maxelong)
        good = np.logical_and(good, fwhm >= minFwhm)
        good = np.logical_and(good, fwhm <  maxFwhm)

        fwhmMode, fwhmMin, fwhmMax = psfex.compute_fwhmrange(fwhm[good], maxFwhmVariability, minFwhm, maxFwhm,
                                                             plot=dict(fwhmHistogram=plotFwhmHistogram))

        #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
        #
        # Here's select_candidates
        #
        #---- Apply some selection over flags, fluxes...

        bad = (flags != 0)
        #set.setBadFlags(int(sum(bad)))

        # When rejection plotting is on, remember each cut and how many
        # sources it rejected, for the legend of the plot below.
        if plotRejection:
            selectionVectors = []
            selectionVectors.append((bad, "flags %d" % sum(bad)))

        dbad = sn < minsn
        #set.setBadSN(int(sum(dbad)))
        bad = np.logical_or(bad, dbad)
        if plotRejection:
            selectionVectors.append((dbad, "S/N %d" % sum(dbad)))

        dbad = fwhm < fwhmMin
        #set.setBadFrmin(int(sum(dbad)))
        bad = np.logical_or(bad, dbad)
        if plotRejection:
            selectionVectors.append((dbad, "fwhmMin %d" % sum(dbad)))

        dbad = fwhm > fwhmMax
        #set.setBadFrmax(int(sum(dbad)))
        bad = np.logical_or(bad, dbad)
        if plotRejection:
            selectionVectors.append((dbad, "fwhmMax %d" % sum(dbad)))

        dbad = elong > maxelong
        #set.setBadElong(int(sum(dbad)))
        bad = np.logical_or(bad, dbad)
        if plotRejection:
            selectionVectors.append((dbad, "elong %d" % sum(dbad)))

        #-- ... and check the integrity of the sample
        if maxbadflag:
            # NOTE(review): `vignet` is not defined anywhere in this method;
            # presumably the candidates' postage-stamp pixel arrays.  As
            # written this branch raises NameError -- confirm upstream.
            nbad = np.array([(v <= -psfex.psfex.cvar.BIG).sum() for v in vignet])
            dbad = nbad > maxbad
            #set.setBadPix(int(sum(dbad)))
            bad = np.logical_or(bad, dbad)
            if plotRejection:
                selectionVectors.append((dbad, "badpix %d" % sum(dbad)))

        good = np.logical_not(bad)
        #
        # We know enough to plot, if so requested
        #
        frame = 0
        if displayExposure:
            # Mark accepted (green) and rejected (red) sources in ds9.
            mi = exposure.getMaskedImage()

            ds9.mtv(mi, frame=frame, title="PSF candidates")

            with ds9.Buffering():
                for i, source in enumerate(catalog):
                    if good[i]:
                        ctype = ds9.GREEN # star candidate
                    else:
                        ctype = ds9.RED # not star

                    ds9.dot("+", source.getX() - mi.getX0(), source.getY() - mi.getY0(),
                            frame=frame, ctype=ctype)

        if plotFlags or plotRejection:
            imag = -2.5*np.log10(flux)
            plt.clf()

            alpha = 0.5
            if plotFlags:
                isSet = np.where(flags == 0x0)[0]
                plt.plot(imag[isSet], fwhm[isSet], 'o', alpha=alpha, label="good")

                # One series per configured bad flag, labelled by the flag name
                # with its common prefixes stripped.
                for i, f in enumerate(self.config.badFlags):
                    mask = 1 << i
                    isSet = np.where(np.bitwise_and(flags, mask))[0]
                    if isSet.any():
                        if np.isfinite(imag[isSet] + fwhm[isSet]).any():
                            label = re.sub(r"\_flag", "",
                                                  re.sub(r"^base\_", "",
                                                         re.sub(r"^.*base\_PixelFlags\_flag\_", "", f)))
                            plt.plot(imag[isSet], fwhm[isSet], 'o', alpha=alpha, label=label)
            else:
                for bad, label in selectionVectors:
                    plt.plot(imag[bad], fwhm[bad], 'o', alpha=alpha, label=label)

            plt.plot(imag[good], fwhm[good], 'o', color="black", label="selected")
            [plt.axhline(_, color='red') for _ in [fwhmMin, fwhmMax]]
            plt.xlim(np.median(imag[good]) + 5*np.array([-1, 1]))
            plt.ylim(fwhm[np.where(np.isfinite(fwhm + imag))].min(), 2*fwhmMax)
            plt.legend(loc=2)
            plt.xlabel("Instrumental %s Magnitude" % fluxName.split(".")[-1].title())
            plt.ylabel("fwhm")
            title = "PSFEX Star Selection"
            plt.title("%s %d selected" % (title, sum(good)))

        if displayExposure:
            # NOTE(review): `imag` is only assigned inside the
            # plotFlags/plotRejection branch above; displayExposure without
            # either of those would raise NameError here -- confirm.
            global eventHandler
            eventHandler = EventHandler(plt.axes(), imag, fwhm, catalog.getX(), catalog.getY(), frames=[frame])

        if plotFlags or plotRejection:
            # Interactive prompt so the user can inspect the plots before
            # continuing (Python 2: raw_input/print-statement).
            while True:
                try:
                    reply = raw_input("continue? [y[es] h(elp) p(db) q(uit)] ").strip()
                except EOFError:
                    reply = "y"

                if not reply:
                    reply = "y"

                if reply[0] == "h":
                    print """\
At this prompt, you can continue with almost any key; 'p' enters pdb,
                                                      'q' returns to the shell, and
                                                      'h' prints this text
""",

                    if displayExposure:
                        print """
If you put the cursor on a point in the matplotlib scatter plot and hit 'p' you'll see it in ds9."""
                elif reply[0] == "p":
                    import pdb; pdb.set_trace()
                elif reply[0] == 'q':
                    sys.exit(1)
                else:
                    break

        #
        # Time to use that stellar classification to generate psfCandidateList
        #
        with ds9.Buffering():
            psfCandidateList = []
            if True:
                catalog = [s for s,g in zip(catalog, good) if g]
            else:
                catalog = catalog[good]

            for source in catalog:
                try:
                    psfCandidate = measAlg.makePsfCandidate(source, exposure)
                    # The setXXX methods are class static, but it's convenient to call them on
                    # an instance as we don't know Exposure's pixel type
                    # (and hence psfCandidate's exact type)
                    if psfCandidate.getWidth() == 0:
                        psfCandidate.setBorderWidth(self.config.borderWidth)
                        psfCandidate.setWidth(self.config.kernelSize + 2*self.config.borderWidth)
                        psfCandidate.setHeight(self.config.kernelSize + 2*self.config.borderWidth)

                    im = psfCandidate.getMaskedImage().getImage()
                    # Reject candidates whose brightest pixel is not finite.
                    vmax = afwMath.makeStatistics(im, afwMath.MAX).getValue()
                    if not np.isfinite(vmax):
                        continue
                    psfCandidateList.append(psfCandidate)

                    if display and displayExposure:
                        ds9.dot("o", source.getX() - mi.getX0(), source.getY() - mi.getY0(),
                                size=4, frame=frame, ctype=ds9.CYAN)
                except Exception as err:
                    logger.logdebug("Failed to make a psfCandidate from source %d: %s" % (source.getId(), err))

        return psfCandidateList
Esempio n. 37
0
def printProcessStats():
    """Emit this node's current process statistics on the default log."""
    from lsst.pex.logging import getDefaultLog
    log = getDefaultLog()
    log.info("Process stats for %s: %s" % (NODE, processStats()))
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, outputRoot=None):
        """Initialize the CameraMapper.
        @param policy        (pexPolicy.Policy) Policy with per-camera defaults
                             already merged
        @param repositoryDir (string) Policy repository for the subclassing
                             module (obtained with getRepositoryPath() on the
                             per-camera default dictionary)
        @param root          (string) Root directory for data
        @param registry      (string) Path to registry with data's metadata
        @param calibRoot     (string) Root directory for calibrations
        @param calibRegistry (string) Path to registry with calibrations'
                             metadata
        @param provided      (list of strings) Keys provided by the mapper
        @param outputRoot    (string) Root directory for output data
        """

        dafPersist.Mapper.__init__(self)

        self.log = pexLog.Log(pexLog.getDefaultLog(), "CameraMapper")

        # Dictionary
        # Merge the camera policy onto the generic mapper dictionary defaults.
        dictFile = pexPolicy.DefaultPolicyFile("daf_butlerUtils",
                "MapperDictionary.paf", "policy")
        dictPolicy = pexPolicy.Policy.createPolicy(dictFile,
                dictFile.getRepositoryPath())
        policy.mergeDefaults(dictPolicy)

        # Levels
        # Each named level becomes the set of strings read from the "levels"
        # policy (presumably data-ID keys -- confirm against the dictionary).
        self.levels = dict()
        if policy.exists("levels"):
            levelsPolicy = policy.getPolicy("levels")
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.getStringArray(key))
        self.defaultLevel = policy.getString("defaultLevel")
        self.defaultSubLevels = dict()
        if policy.exists("defaultSubLevels"):
            defaultSubLevelsPolicy = policy.getPolicy("defaultSubLevels")
            for key in defaultSubLevelsPolicy.names(True):
                self.defaultSubLevels[key] = defaultSubLevelsPolicy.getString(key)

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        if outputRoot is not None:
            # Path manipulations are subject to race condition
            if not os.path.exists(outputRoot):
                try:
                    os.makedirs(outputRoot)
                except OSError, e:
                    # Another process may have created it first; EEXIST is benign.
                    if not e.errno == errno.EEXIST:
                        raise
                if not os.path.exists(outputRoot):
                    raise RuntimeError, "Unable to create output " \
                            "repository '%s'" % (outputRoot,)
            if os.path.exists(root):
                # Symlink existing input root to "_parent" in outputRoot.
                src = os.path.abspath(root)
                dst = os.path.join(outputRoot, "_parent")
                if not os.path.exists(dst):
                    try:
                        os.symlink(src, dst)
                    except OSError:
                        # Tolerate a concurrent symlink attempt; validated below.
                        pass
                if os.path.exists(dst):
                    if os.path.realpath(dst) != os.path.realpath(src):
                        raise RuntimeError, "Output repository path " \
                                "'%s' already exists and differs from " \
                                "input repository path '%s'" % (dst, src)
                else:
                    raise RuntimeError, "Unable to symlink from input " \
                            "repository path '%s' to output repository " \
                            "path '%s'" % (src, dst)
            # We now use the outputRoot as the main root with access to the
            # input via "_parent".
            root = outputRoot
Esempio n. 39
0
 def __init__(self, root, log=None):
     """Remember the repository root and logger; the butler is created lazily.

     @param root: root directory of the data repository
     @param log: logger to use; the default log is used when None
     """
     self.log = getDefaultLog() if log is None else log
     self.root = root
     self._butler = None