Example #1
 def setUp(self):
     # Load sample input from disk
     expPath = os.path.join(getPackageDir("pipe_tasks"), "tests", "data",
                            "v695833-e0-c000-a00.sci.fits")
     self.exposure = afwImage.ExposureF(expPath)
     # set log level so that warnings do not display
     Log.getLogger("characterizeImage").setLevel(Log.ERROR)
Example #2
    def testBasics(self):
        """!Test basic functionality of LinearizeLookupTable
        """
        for imageClass in (afwImage.ImageF, afwImage.ImageD):
            inImage = makeRampImage(bbox=self.bbox,
                                    start=-5,
                                    stop=250,
                                    imageClass=imageClass)
            table = self.makeTable(inImage)

            log = Log.getLogger("ip.isr.LinearizeLookupTable")

            measImage = inImage.Factory(inImage, True)
            llt = Linearizer(table=table, detector=self.detector)
            linRes = llt.applyLinearity(measImage,
                                        detector=self.detector,
                                        log=log)

            refImage = inImage.Factory(inImage, True)
            refNumOutOfRange = refLinearize(image=refImage,
                                            detector=self.detector,
                                            table=table)

            self.assertEqual(linRes.numAmps,
                             len(self.detector.getAmplifiers()))
            self.assertEqual(linRes.numAmps, linRes.numLinearized)
            self.assertEqual(linRes.numOutOfRange, refNumOutOfRange)
            self.assertImagesAlmostEqual(refImage, measImage)

            # make sure logging is accepted
            log = Log.getLogger("ip.isr.LinearizeLookupTable")
            linRes = llt.applyLinearity(image=measImage,
                                        detector=self.detector,
                                        log=log)
Example #3
File: utils.py Project: lsst/log
def traceSetAt(name, number):
    """!Adjust logging level to display messages with trace number <= NUMBER

    Set the levels of loggers "TRACEn.name" with n > NUMBER to INFO,
    and those with n <= NUMBER to DEBUG, so that only tracing messages
    with n <= NUMBER are shown.

    @param[in] name  The logger name
    @param[in] number  The trace number threshold for display
    """
    for i in range(6):
        level = Log.INFO if i > number else Log.DEBUG
        Log.getLogger('TRACE%d.%s' % (i, name)).setLevel(level)
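A minimal usage sketch of traceSetAt (assuming lsst.log and lsst.log.utils are importable; the component name "ip.diffim" is only an illustration, as in Example #51 below):

import lsst.log.utils as logUtils
from lsst.log import Log

# Show trace messages with trace number <= 2 for the "ip.diffim" component.
logUtils.traceSetAt("ip.diffim", 2)

Log.getLogger("TRACE2.ip.diffim").debug("shown: trace number 2 <= 2")
Log.getLogger("TRACE3.ip.diffim").debug("suppressed: trace number 3 > 2")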
Example #4
    def setUp(self):
        # Load sample input from disk
        expPath = os.path.join(getPackageDir("pipe_tasks"), "tests", "data",
                               "v695833-e0-c000-a00.sci.fits")
        self.exposure = afwImage.ExposureF(expPath)

        # Characterize the image (create PSF, etc.)
        charImConfig = CharacterizeImageConfig()
        charImTask = CharacterizeImageTask(config=charImConfig)
        self.charImResults = charImTask.run(self.exposure)

        # set log level so that warnings do not display
        Log.getLogger("calibrate").setLevel(Log.ERROR)
Example #5
def traceSetAt(name, number):
    """!Adjust logging level to display messages with trace number <= NUMBER

    Set the levels of loggers "TRACEn.name" with n > NUMBER to INFO,
    and those with n <= NUMBER to DEBUG, so that only tracing messages
    with n <= NUMBER are shown.

    @param[in] name  The logger name
    @param[in] number  The trace number threshold for display
    """
    for i in range(6):
        level = Log.INFO if i > number else Log.DEBUG
        Log.getLogger('TRACE%d.%s' % (i, name)).setLevel(level)
Example #6
File: utils.py Project: lsst/log
def traceSetAt(name, number):
    """Adjusts logging level to display messages with the trace number being
    less than or equal to the provided value.

    Parameters
    ----------
    name : `str`
        Name of the logger.
    number : `int`
        The trace number threshold for display.
    """
    for i in range(6):
        level = Log.INFO if i > number else Log.DEBUG
        Log.getLogger('TRACE%d.%s' % (i, name)).setLevel(level)
Example #7

def main():

    log = Log.getLogger('foo')
    log.setLevel(Log.INFO)

    ny, nx = 256, 256

    fwhm0 = 5.0
    psf = measAlg.DoubleGaussianPsf(21, 21, fwhm0)
    flux = 1.0e6

    # make two sets of fake data; the second set is missing a source
    nSrc = 4
    xy = randomCoords(nSrc)
    fluxs = [flux]*(nSrc-1) + [0.7*flux]
    mimg = makeFakeImage(nx, ny, xy, fluxs, [3.0*fwhm0]*nSrc)
    mimg.writeFits("foo.fits")

    nSrcB = nSrc - 1  # drop one source so the second image is missing a peak
    mimgB = makeFakeImage(nx, ny, xy[0:nSrcB], fluxs[0:nSrcB], [3.0*fwhm0]*nSrcB)
    mimgB.writeFits("fooB.fits")

    # Run the detection
    fp = detect(mimg)

    # deblend mimgB (missing a peak) using the fp with the extra peak
    deb = measDeb.deblend(fp, mimgB, psf, fwhm0, verbose=True, rampFluxAtEdge=True, log=log)
    print("Deblended peaks: ", len(deb.peaks))

    fig = makePortionFigure(deb, mimg, mimgB)
    fig.savefig("test.png")
Example #8
    def setUp(self):

        # Load sample input from disk
        testDir = os.path.dirname(__file__)
        self.srcCat = afwTable.SourceCatalog.readFits(
            os.path.join(testDir, "data", "v695833-e0-c000.xy.fits"))

        self.srcCat["slot_ApFlux_fluxSigma"] = 1
        self.srcCat["slot_PsfFlux_fluxSigma"] = 1

        # The .xy.fits file has sources in the range ~ [0,2000],[0,4500]
        # which is bigger than the exposure
        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                  afwGeom.Extent2I(2048, 4612))
        smallExposure = afwImage.ExposureF(
            os.path.join(testDir, "data", "v695833-e0-c000-a00.sci.fits"))
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(smallExposure.getWcs())
        self.exposure.setFilter(smallExposure.getFilter())
        self.exposure.setCalib(smallExposure.getCalib())

        # Make a reference loader
        butler = Butler(RefCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
        logLevel = Log.TRACE
        self.log = Log.getLogger('testPhotoCal')
        self.log.setLevel(logLevel)

        self.config = PhotoCalConfig()

        # The test and associated data have been prepared on the basis that we
        # use the PsfFlux to perform photometry.
        self.config.fluxField = "base_PsfFlux_flux"
Example #9
 def insertDatasets(self, registry, datastore):
     """Add all Dataset entries to the given Registry and Datastore.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for repo in self.repos.values():
         for datasetTypeName, datasets in repo.gen2.datasets.items():
             datasetType = self.datasetTypes.get(datasetTypeName, None)
             if datasetType is None:
                 log.debug("Skipping insertion of '%s' from %s",
                           datasetTypeName, repo.gen2.root)
                 continue
             log.info("Inserting '%s' from %s", datasetTypeName,
                      repo.gen2.root)
             collectionTemplate = self.config["collections.overrides"].get(
                 datasetTypeName, None)
             if collectionTemplate is None:
                 collection = repo.run.collection
                 registry.ensureRun(repo.run)
                 run = repo.run
             translator = repo.translators[datasetTypeName]
             for dataset in datasets.values():
                 gen3id = translator(dataset.dataId)
                 if collectionTemplate is not None:
                     allIds = dataset.dataId.copy()
                     allIds.update(gen3id)
                     collection = collectionTemplate.format(**allIds)
                     run = self.runs.setdefault(collection,
                                                Run(collection=collection))
                     registry.ensureRun(run)
                 log.debug("Adding Dataset %s as %s in %s",
                           dataset.filePath, gen3id, repo.run)
                 ref = registry.addDataset(datasetType, gen3id, run)
                 datastore.ingest(path=os.path.relpath(
                     dataset.fullPath, start=datastore.root),
                                  ref=ref)
Example #10
def exposureWriteFitsWithOptions(self, dest, options):
    """Write an Exposure or MaskedImage to FITS, with options

    Parameters
    ----------
    dest : `str`
        Fits file path to which to write the exposure or masked image.
    options : `lsst.daf.base.PropertySet`
        Write options. The items "image", "mask" and "variance" are read.
        Each must be an `lsst.daf.base.PropertySet` with data for
        ``lsst.afw.fits.ImageWriteOptions``.
    """
    if options is not None:
        try:
            writeOptionDict = {
                name + "Options":
                ImageWriteOptions(options.getPropertySet(name))
                for name in ("image", "mask", "variance")
            }
        except Exception as e:
            log = Log.getLogger("lsst.afw.image")
            log.warn(
                "Could not parse options; writing with defaults: {}".format(e))
        else:
            self.writeFits(dest, **writeOptionDict)
            return
    self.writeFits(dest)
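A hedged sketch of how the options argument might be built. PropertySet supports dotted names for nesting, so the getPropertySet(name) calls above would find the sub-PropertySets created here; the "compression.algorithm" key and "GZIP" value are assumptions for illustration, not verified against lsst.afw.fits.ImageWriteOptions, and writeFitsWithOptions is assumed to be the method name this function is attached to:

import lsst.afw.image as afwImage
from lsst.daf.base import PropertySet

exposure = afwImage.ExposureF(10, 10)  # toy exposure for illustration

options = PropertySet()
for plane in ("image", "mask", "variance"):
    # Dotted names address nested PropertySets; the key and value are assumed.
    options.set(plane + ".compression.algorithm", "GZIP")

exposure.writeFitsWithOptions("example.fits", options)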
Example #11
    def __init__(self, uri, create):
        # A late import allows systems without swiftclient to import this
        # module and not fail, as long as they don't try to use it.
        import swiftclient
        self.swift = swiftclient

        self._log = Log.getLogger("daf.persistence.butler")
        self._uri = uri
        scheme, \
            self._url, \
            self._version, \
            self._tenantName, \
            self._containerName = self._parseURI(uri)
        self._url = "http://" + os.path.join(self._url, self._version)
        self._connection = self._getConnection()

        self.fileCache = {}  # (location, file handle)

        if not self.containerExists():
            if not create:
                raise dafPersist.NoRepositroyAtRoot(
                    "No repository at {}".format(uri))
            else:
                try:
                    self._connection.put_container(self._containerName)
                except self.swift.ClientException:
                    raise RuntimeError(
                        "Connection to {} tenant '{}' failed.".format(
                            self._url, self._tenantName))
Example #12
def _assignClusters(yvec, centers):
    """Return a vector of centerIds based on their distance to the centers"""
    assert len(centers) > 0

    minDist = numpy.nan*numpy.ones_like(yvec)
    clusterId = numpy.empty_like(yvec)
    clusterId.dtype = int               # zeros_like(..., dtype=int) isn't in numpy 1.5
    dbl = Log.getLogger("objectSizeStarSelector._assignClusters")
    dbl.setLevel(dbl.INFO)

    # Make sure we are logging all numpy warnings...
    oldSettings = numpy.seterr(all="warn")
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        for i, mean in enumerate(centers):
            dist = abs(yvec - mean)
            if i == 0:
                update = dist == dist       # True for all points
            else:
                update = dist < minDist
                if w:  # Only do if w is not empty i.e. contains a warning message
                    dbl.trace(str(w[-1]))

            minDist[update] = dist[update]
            clusterId[update] = i
    numpy.seterr(**oldSettings)

    return clusterId
Example #13
 def testWriteCfg(self):
     # The number of writers to use can result in too many open files.
     # We calculate this as 80% of the maximum allowed number for this
     # process, or 1000, whichever is smaller.
     numWriters = 1000
     try:
         import resource
         limit = resource.getrlimit(resource.RLIMIT_NOFILE)
         allowedOpen = int(limit[0] * 0.8)
         if allowedOpen < numWriters:
             numWriters = allowedOpen
     except Exception:
         # Use the default number if we had trouble obtaining resources
         pass
     startTime = time.time()
     go = multiprocessing.Value('b', False)
     cfg = dp.RepositoryCfg(root=os.path.join(self.testDir), mapper='bar', mapperArgs={},
                            parents=None, policy=None)
     procs = [multiprocessing.Process(target=TestOneThousandWriters.writeCfg, args=(cfg, go))
              for x in range(numWriters)]
     for proc in procs:
         proc.start()
     go.value = True  # release the writers waiting on the shared flag
     for proc in procs:
         proc.join()
     endTime = time.time()
     log = Log.getLogger("daf.persistence")
     log.trace("TestOneThousandWriters took {} seconds.".format(endTime-startTime))
Example #14
    def testWriteCfg(self):
        """Test parallel writes to a configuration file.

        multiprocessing is used to spawn several writer function executions,
        all of which wait to be released by the condition variable "go".

        There are no asserts here, so success is measured solely by not
        failing with an exception, but the time it took to do the writes can
        be logged as a potential performance metric.
        """
        numWriters = 3
        startTime = time.time()
        go = multiprocessing.Value('b', False)
        cfg = dp.RepositoryCfg(root=os.path.join(self.testDir),
                               mapper='bar',
                               mapperArgs={},
                               parents=None,
                               policy=None)
        procs = [
            multiprocessing.Process(target=TestMultipleWriters.writeCfg,
                                    args=(cfg, go)) for x in range(numWriters)
        ]
        for proc in procs:
            proc.start()
        go.value = True  # release the writers waiting on the shared flag
        for proc in procs:
            proc.join()
        endTime = time.time()
        log = Log.getLogger("daf.persistence")
        log.trace("TestMultipleWriters took {} seconds.".format(endTime -
                                                                startTime))
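The writer target itself is not among these examples. A hypothetical sketch of what it plausibly looks like, given the docstring: each spawned process spins on the shared flag, which is why the parent must set go.value = True rather than rebind the name go.

import time

def writeCfg(cfg, go):
    """Hypothetical sketch of the writer target; the real
    TestMultipleWriters.writeCfg is not shown in these examples."""
    # Spin until the parent releases all writers at once.
    while not go.value:
        time.sleep(0.001)
    # ... perform the concurrent RepositoryCfg write under test here ...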
Example #15
    def __init__(self, camera=None, detector=None, log=None, **kwargs):
        self._instrument = None
        self._raftName = None
        self._slotName = None
        self._detectorName = None
        self._detectorSerial = None
        self._detectorId = None
        self._filter = None
        self._calibId = None
        self._metadata = PropertyList()
        self.setMetadata(PropertyList())
        self.calibInfoFromDict(kwargs)

        # Define the required attributes for this calibration.
        self.requiredAttributes = set(['_OBSTYPE', '_SCHEMA', '_VERSION'])
        self.requiredAttributes.update([
            '_instrument', '_raftName', '_slotName', '_detectorName',
            '_detectorSerial', '_detectorId', '_filter', '_calibId',
            '_metadata'
        ])

        self.log = log if log else Log.getLogger(__name__.partition(".")[2])

        if detector:
            self.fromDetector(detector)
        self.updateMetadata(camera=camera, detector=detector)
Example #16
    def append(self, val):
        try:
            bkgd, interpStyle, undersampleStyle, approxStyle, approxOrderX, approxOrderY, approxWeighting = val
        except TypeError:
            bkgd = val
            interpStyle = None
            undersampleStyle = None
            approxStyle = None
            approxOrderX = None
            approxOrderY = None
            approxWeighting = None

        # Check to see if the Background is actually a BackgroundMI.
        # Such special treatment is not generally a good idea as it is against the whole idea of subclassing.
        # However, lsst.afw.math.makeBackground() returns a Background, even though it's really a BackgroundMI
        # under the covers.  Persistence requires that the type python sees is the actual type under the covers
        # (or it will call the wrong python class's python persistence methods).
        # The real solution is to not use makeBackground() in python but call the constructor directly;
        # however there is already code using makeBackground(), so this is an attempt to assist the user.
        subclassed = afwMath.cast_BackgroundMI(bkgd)
        if subclassed is not None:
            bkgd = subclassed
        else:
            logger = Log.getLogger("afw.BackgroundList.append")
            logger.warn(
                "Unrecognised Background object %s may be unpersistable.",
                bkgd)

        bgInfo = (bkgd, interpStyle, undersampleStyle, approxStyle,
                  approxOrderX, approxOrderY, approxWeighting)
        self._backgrounds.append(bgInfo)
Example #17
 def insertSkyMaps(self, registry):
     """Add all necessary SkyMap DataUnits (and associated Tracts and
     Patches) to the Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for sha1, skyMap in self.skyMaps.items():
         skyMapName = self.skyMapNames.get(sha1, None)
         try:
             existing, = registry.query(
                 "SELECT skymap FROM SkyMap WHERE sha1=:sha1", sha1=sha1)
             if skyMapName is None:
                 skyMapName = existing["skymap"]
                 self.skyMapNames[sha1] = skyMapName
                 log.debug("Using preexisting SkyMap '%s' with sha1=%s",
                           skyMapName, sha1.hex())
             if skyMapName != existing["skymap"]:
                 raise ValueError((
                     "SkyMap with new name={} and sha1={} already exists in the Registry "
                     "with name={}".format(skyMapName, sha1.hex(),
                                           existing["skymap"])))
             continue
         except ValueError:
             # No SkyMap with this sha1 exists, so we need to insert it.
             pass
         if skyMapName is None:
             raise LookupError(
                 ("SkyMap with sha1={} has no name "
                  "and does not already exist in the Registry.").format(
                      sha1.hex()))
         log.info("Inserting SkyMap '%s' with sha1=%s", skyMapName,
                  sha1.hex())
         skyMap.register(skyMapName, registry)
Example #18
 def testWriteCfg(self):
     # The number of writers to use can result in too many open files.
     # We calculate this as 80% of the maximum allowed number for this
     # process, or 1000, whichever is smaller.
     numWriters = 1000
     try:
         import resource
         limit = resource.getrlimit(resource.RLIMIT_NOFILE)
         allowedOpen = int(limit[0] * 0.8)
         if allowedOpen < numWriters:
             numWriters = allowedOpen
     except Exception:
         # Use the default number if we had trouble obtaining resources
         pass
     startTime = time.time()
     go = multiprocessing.Value('b', False)
     cfg = dp.RepositoryCfg(root=os.path.join(self.testDir),
                            mapper='bar',
                            mapperArgs={},
                            parents=None,
                            policy=None)
     procs = [
         multiprocessing.Process(target=TestOneThousandWriters.writeCfg,
                                 args=(cfg, go)) for x in range(numWriters)
     ]
     for proc in procs:
         proc.start()
     go.value = True  # release the writers waiting on the shared flag
     for proc in procs:
         proc.join()
     endTime = time.time()
     log = Log.getLogger("daf.persistence")
     log.trace("TestOneThousandWriters took {} seconds.".format(endTime -
                                                                startTime))
Example #19
 def insertObservations(self, registry):
     """Add all necessary visit and exposure Dimensions to the Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for mapperName, nested in self.obsInfo.items():
         instrument = self.config["mappers", mapperName, "instrument"]
         log.info(
             "Inserting exposure and visit Dimensions for instrument '%s'",
             instrument)
         for obsInfoId, (obsInfo, filt) in nested.items():
             # TODO: generalize this to instruments with snaps and/or
             # compound gen2 visit/exposure IDs
             visitId, = obsInfoId
             exposureId, = obsInfoId
             # TODO: skip insertion if Dimensions already exist.
             dataId = DataId(instrument=instrument,
                             visit=visitId,
                             physical_filter=filt,
                             exposure=exposureId,
                             universe=registry.dimensions)
             updateVisitEntryFromObsInfo(dataId, obsInfo)
             updateExposureEntryFromObsInfo(dataId, obsInfo)
             log.debug("Inserting exposure %d and visit %d.", exposureId,
                       visitId)
             registry.addDimensionEntry("visit", dataId)
             registry.addDimensionEntry("exposure", dataId)
Example #20
 def __init__(self, uri, create):
     self.log = Log.getLogger("daf.persistence.butler")
     self.root = self._pathFromURI(uri)
     if self.root and not os.path.exists(self.root):
         if not create:
             raise NoRepositroyAtRoot("No repository at {}".format(uri))
         safeMakeDir(self.root)
Example #21
    def __init__(self):
        # Set up defaults to send to deblender

        # Always deblend as Psf
        self.psfChisqCut1 = self.psfChisqCut2 = self.psfChisqCut2b = np.inf
        self.log = Log.getLogger('ip.diffim.DipoleDeblender')
        self.sigma2fwhm = 2. * np.sqrt(2. * np.log(2.))
Example #22
    def setUp_tests(self, butler, mapper, dataIds):
        """
        Set up the necessary shared variables used by multiple tests.

        Parameters
        ----------
        butler: lsst.daf.persistence.Butler
            A butler object, instantiated on the testdata repository for the
            obs package being tested.
        mapper: lsst.obs.CameraMapper
            A CameraMapper object for your camera, instantiated on the testdata
            repository of the obs package being tested.
        dataIds: dict
            dictionary of (exposure name): (dataId of that exposure in the
            testdata repository), with unittest.SkipTest as the value for any
            exposures you do not have/do not want to test. It must contain a
            valid 'raw' dataId, in addition to 'bias', 'flat', 'dark', which may
            be set to SkipTest. For example::
                  self.dataIds = {'raw': {'visit': 1, 'filter': 'g'},
                                  'bias': {'visit': 1},
                                  'flat': {'visit': 1},
                                  'dark': unittest.SkipTest
                                  }
        """
        self.butler = butler
        self.mapper = mapper
        self.dataIds = dataIds
        self.log = Log.getLogger('ObsTests')
Example #23
    def __call__(self, args):
        """Run the task on a single target.

        This implementation is nearly equivalent to the overridden one, but
        it never writes out metadata and always returns results. For memory
        efficiency reasons, the return value is exactly that of |run|,
        rather than a :class:`~lsst.pipe.base.Struct` wrapped around it.
        """
        data_ref, kwargs = args
        if self.log is None:
            self.log = Log.getDefaultLogger()
        if hasattr(data_ref, "dataId"):
            self.log.MDC("LABEL", str(data_ref.dataId))
        elif isinstance(data_ref, (list, tuple)):
            self.log.MDC("LABEL", str([ref.dataId for ref in data_ref if hasattr(ref, "dataId")]))
        task = self.makeTask(args=args)
        result = None
        try:
            result = task.run(data_ref, **kwargs)
        except Exception as e:
            if self.doRaise:
                raise
            if hasattr(data_ref, "dataId"):
                task.log.fatal("Failed on dataId=%s: %s" % (data_ref.dataId, e))
            elif isinstance(data_ref, (list, tuple)):
                task.log.fatal("Failed on dataId=[%s]: %s" %
                               (",".join([str(_.dataId) for _ in data_ref]), e))
            else:
                task.log.fatal("Failed on dataRef=%s: %s" % (data_ref, e))
            if not isinstance(e, pipe_base.TaskError):
                traceback.print_exc(file=sys.stderr)
Example #24
 def insertDatasetTypes(self, registry):
     """Add all necessary DatasetType registrations to the Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for datasetType in self.datasetTypes.values():
         log.debug("Registering DatasetType '%s'." % datasetType.name)
         registry.registerDatasetType(datasetType)
Example #25
 def __init__(self, config, gen2repos, skyMaps, skyMapRoots, obsInfo):
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     self.config = Config(config)
     self.skyMaps = skyMaps
     self.obsInfo = obsInfo
     self.repos = OrderedDict()
     self.datasetTypes = dict()
     self.runs = {
         k: Run(id=v, collection=k)
         for k, v in self.config["runs"].items()
     }
     self.skyMapNames = {}  # mapping from hash to Gen3 skymap name
     skyMapConfig = self.config.get("skymaps", {})
     # Swap keys and values in skyMapConfig; the original can't be in
     # the order we want, because roots can have '.', and that gets
     # interpreted specially by Config when used as a key.
     rootToSkyMapName = {v: k for k, v in skyMapConfig.items()}
     for hash, skyMap in self.skyMaps.items():
         log.debug("Processing input skyMap with hash=%s", hash.hex())
         for root in skyMapRoots[hash]:
             log.debug("Processing input skyMapRoot %s", root)
             skyMapName = rootToSkyMapName.get(root, None)
             if skyMapName is not None:
                 log.debug("Using '%s' for skymap with hash=%s", skyMapName,
                           hash.hex())
                 self.skyMapNames[hash] = skyMapName
                 break
     # Ideally we'd get the dimension universe from a Registry, but that
     # would require restructuring things in breaking ways, and I'm hoping
     # to just remove all of this code in favor of
     # obs.base.gen3.RepoConverter anyway.
     universe = DimensionUniverse.fromConfig()
     for gen2repo in gen2repos.values():
         self._addConvertedRepoSorted(gen2repo, universe)
Example #26
 def __init__(self, uri, create):
     self.log = Log.getLogger("daf.persistence.butler")
     self.root = self._pathFromURI(uri)
     if self.root and not os.path.exists(self.root):
         if not create:
             raise NoRepositroyAtRoot("No repository at {}".format(uri))
         safeMakeDir(self.root)
Example #27
 def insertCalibrationLabels(self, registry):
     """Add all necessary calibration_label Dimension entries to the
     Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for repo in self.repos.values():
         if repo.gen2.calibDict is None:
             continue
         # TODO: we currently implicitly assume that there is only one
         # calib repo being converted, or at least that different calib
         # repos don't have any of the same calibDates.  To fix that we
         # probably need to add a column to the calibration_label table
         # to represent a "CalibrationSet", and provide a way to configure
         # which one a Registry uses.  We'll probably also want to use that
         # pattern for other dimensions in the future, such as systems of
         # observation relationships that define a particular mapping from
         # exposure to visit.
         mapperName = repo.gen2.MapperClass.__name__
         instrument = self.config["mappers", mapperName, "instrument"]
         log.debug("Inserting unbounded calibration_label.")
         addUnboundedCalibrationLabel(registry, instrument)
         for (datasetTypeName, calibDate, ccd,
              filter), (first, last) in repo.gen2.calibDict.items():
             dataId = DataId(calibration_label=makeCalibrationLabel(
                 datasetTypeName, calibDate, ccd=ccd, filter=filter),
                             instrument=instrument,
                             universe=registry.dimensions)
             dataId.entries["calibration_label"]["valid_first"] = first
             dataId.entries["calibration_label"][
                 "valid_last"] = last + timedelta(days=1)
             log.debug(
                 "Inserting calibration_label %s with validity range %s - %s.",
                 dataId["calibration_label"], first, last)
             registry.addDimensionEntry("calibration_label", dataId)
Example #28
    def testBasics(self):
        """!Test basic functionality of LinearizeSquared
        """
        for imageClass in (afwImage.ImageF, afwImage.ImageD):
            inImage = makeRampImage(bbox=self.bbox,
                                    start=-5,
                                    stop=2500,
                                    imageClass=imageClass)

            measImage = inImage.Factory(inImage, True)
            linSq = LinearizeSquared()
            linRes = linSq(image=measImage, detector=self.detector)
            desNumLinearized = np.sum(self.sqCoeffs.flatten() > 0)
            self.assertEqual(linRes.numLinearized, desNumLinearized)
            self.assertEqual(linRes.numAmps,
                             len(self.detector.getAmpInfoCatalog()))

            refImage = inImage.Factory(inImage, True)
            refLinearizeSquared(image=refImage, detector=self.detector)

            self.assertImagesAlmostEqual(refImage, measImage)

            # make sure logging is accepted
            log = Log.getLogger("ip.isr.LinearizeSquared")
            linRes = linSq(image=measImage, detector=self.detector, log=log)
Example #29
 def insertSkyMaps(self, registry):
     """Add all necessary SkyMap Dimensions (and associated tracts and
     patches) to the Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for hash, skyMap in self.skyMaps.items():
         skyMapName = self.skyMapNames.get(hash, None)
         try:
             existing, = registry.query(
                 "SELECT skymap FROM skymap WHERE hash=:hash", hash=hash)
             if skyMapName is None:
                 skyMapName = existing["skymap"]
                 self.skyMapNames[hash] = skyMapName
                 log.debug("Using preexisting skymap '%s' with hash=%s",
                           skyMapName, hash.hex())
             if skyMapName != existing["skymap"]:
                 raise ValueError((
                     "skymap with new name={} and hash={} already exists in the Registry "
                     "with name={}".format(skyMapName, hash.hex(),
                                           existing["skymap"])))
             continue
         except ValueError:
             # No skymap with this hash exists, so we need to insert it.
             pass
         if skyMapName is None:
             raise LookupError(
                 ("skymap with hash={} has no name "
                  "and does not already exist in the Registry.").format(
                      hash.hex()))
         log.info("Inserting skymap '%s' with hash=%s", skyMapName,
                  hash.hex())
         skyMap.register(skyMapName, registry)
Example #30
    def __init__(self, config=None, name=None, parentTask=None, log=None):
        self.metadata = dafBase.PropertyList()
        self._parentTask = parentTask

        if parentTask is not None:
            if name is None:
                raise RuntimeError("name is required for a subtask")
            self._name = name
            self._fullName = parentTask._computeFullName(name)
            if config is None:
                config = getattr(parentTask.config, name)
            self._taskDict = parentTask._taskDict
            loggerName = parentTask.log.getName() + '.' + name
        else:
            if name is None:
                name = getattr(self, "_DefaultName", None)
                if name is None:
                    raise RuntimeError(
                        "name is required for a task unless it has attribute _DefaultName"
                    )
                name = self._DefaultName
            self._name = name
            self._fullName = self._name
            if config is None:
                config = self.ConfigClass()
            self._taskDict = dict()
            loggerName = self._fullName
            if log is not None and log.getName():
                loggerName = log.getName() + '.' + loggerName

        self.log = Log.getLogger(loggerName)
        self.config = config
        self._display = lsstDebug.Info(self.__module__).display
        self._taskDict[self._fullName] = self
Example #31
    def __init__(self):
        # Set up defaults to send to deblender

        # Always deblend as Psf
        self.psfChisqCut1 = self.psfChisqCut2 = self.psfChisqCut2b = np.inf
        self.log = Log.getLogger('ip.diffim.DipoleDeblender')
        self.sigma2fwhm = 2. * np.sqrt(2. * np.log(2.))
Example #32
    def __init__(self, config=None, name=None, parentTask=None, log=None):
        self.metadata = dafBase.PropertyList()
        self._parentTask = parentTask

        if parentTask is not None:
            if name is None:
                raise RuntimeError("name is required for a subtask")
            self._name = name
            self._fullName = parentTask._computeFullName(name)
            if config is None:
                config = getattr(parentTask.config, name)
            self._taskDict = parentTask._taskDict
            loggerName = parentTask.log.getName() + '.' + name
        else:
            if name is None:
                name = getattr(self, "_DefaultName", None)
                if name is None:
                    raise RuntimeError("name is required for a task unless it has attribute _DefaultName")
                name = self._DefaultName
            self._name = name
            self._fullName = self._name
            if config is None:
                config = self.ConfigClass()
            self._taskDict = dict()
            loggerName = self._fullName
            if log is not None and log.getName():
                loggerName = log.getName() + '.' + loggerName

        self.log = Log.getLogger(loggerName)
        self.config = config
        self._display = lsstDebug.Info(self.__module__).display
        self._taskDict[self._fullName] = self
Example #33
def _assignClusters(yvec, centers):
    """Return a vector of centerIds based on their distance to the centers"""
    assert len(centers) > 0

    minDist = numpy.nan * numpy.ones_like(yvec)
    clusterId = numpy.empty_like(yvec)
    clusterId.dtype = int  # zeros_like(..., dtype=int) isn't in numpy 1.5
    dbl = Log.getLogger("objectSizeStarSelector._assignClusters")
    dbl.setLevel(dbl.INFO)

    # Make sure we are logging all numpy warnings...
    oldSettings = numpy.seterr(all="warn")
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        for i, mean in enumerate(centers):
            dist = abs(yvec - mean)
            if i == 0:
                update = dist == dist  # True for all points
            else:
                update = dist < minDist
                if w:  # Only do if w is not empty i.e. contains a warning message
                    dbl.trace(str(w[-1]))

            minDist[update] = dist[update]
            clusterId[update] = i
    numpy.seterr(**oldSettings)

    return clusterId
Example #34
    def setUp(self):
        np.random.seed(12345)

        n_points = 1000
        # reference_obj_array is a numpy array representing
        # 3D points randomly drawn on a 1 sq deg patch.
        self.reference_obj_array = np.empty((n_points, 4))
        cos_theta_array = np.random.uniform(
            np.cos(np.pi / 2 + 0.5 * __deg_to_rad__),
            np.cos(np.pi / 2 - 0.5 * __deg_to_rad__),
            size=n_points)
        sin_theta_array = np.sqrt(1 - cos_theta_array**2)
        phi_array = np.random.uniform(-0.5, 0.5,
                                      size=n_points) * __deg_to_rad__
        self.reference_obj_array[:, 0] = sin_theta_array * np.cos(phi_array)
        self.reference_obj_array[:, 1] = sin_theta_array * np.sin(phi_array)
        self.reference_obj_array[:, 2] = cos_theta_array
        self.reference_obj_array[:, 3] = (
            np.random.power(1.2, size=n_points) * 4 + 20)

        # Our initial source catalog is a straight copy of the reference
        # array at first. In some of the tests we will add rotations and
        # shifts to the data in order to test the input and outputs of our
        # matcher.
        self.source_obj_array = copy(self.reference_obj_array)
        self.log = Log()
Example #35
 def insertObservations(self, registry):
     """Add all necessary Visit and Exposure DataUnits to the Registry.
     """
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for mapperName, nested in self.visitInfo.items():
         camera = self.config["mappers"][mapperName]["camera"]
         log.info("Inserting Exposure and Visit DataUnits for Camera '%s'",
                  camera)
         for visitInfoId, (visitInfo, filt) in nested.items():
             # TODO: generalize this to cameras with snaps and/or compound gen2 visit/exposure IDs
             visitId, = visitInfoId
             exposureId, = visitInfoId
             # TODO: skip insertion if DataUnits already exist.
             mid = visitInfo.getDate().toPython()
             offset = datetime.timedelta(seconds=0.5 *
                                         visitInfo.getExposureTime())
             commonValues = {
                 "camera": camera,
                 "visit": visitId,
                 "physical_filter": filt,
                 "datetime_begin": mid - offset,
                 "exposure_time": visitInfo.getExposureTime(),
                 "boresight_az": visitInfo.getBoresightAzAlt().getLongitude().asDegrees(),
                 "boresight_alt": visitInfo.getBoresightAzAlt().getLatitude().asDegrees(),
                 "rot_angle": visitInfo.getBoresightRotAngle().asDegrees(),
             }
             exposureValues = commonValues.copy()
             exposureValues.update({
                 "exposure": exposureId,
                 "snap": 0,
                 "dark_time": visitInfo.getDarkTime()
             })
             visitValues = commonValues.copy()
             visitValues.update({
                 "datetime_end": mid + offset,
                 "earth_rotation_angle": visitInfo.getEra().asDegrees(),
                 "boresight_ra": visitInfo.getBoresightRaDec().getLongitude().asDegrees(),
                 "boresight_dec": visitInfo.getBoresightRaDec().getLatitude().asDegrees(),
                 "boresight_parallactic_angle": visitInfo.getBoresightParAngle().asDegrees(),
                 "local_era": visitInfo.getLocalEra().asDegrees(),
             })
             log.debug("Inserting Exposure %d and Visit %d.", exposureId,
                       visitId)
             registry.addDataUnitEntry("Visit", visitValues)
             registry.addDataUnitEntry("Exposure", exposureValues)
Example #36
    def __init__(self, config):
        self.config = config
        self.log = Log.getLogger("ip.diffim.DiaSourceAnalysis")

        self.bitMask = 0
        srcBadMaskPlanes = self.config.srcBadMaskPlanes
        for maskPlane in srcBadMaskPlanes:
            self.bitMask |= afwImage.Mask.getPlaneBitMask(maskPlane)
Example #37
    def __init__(self, config):
        self.config = config
        self.log = Log.getLogger("ip.diffim.DiaSourceAnalysis")

        self.bitMask = 0
        srcBadMaskPlanes = self.config.srcBadMaskPlanes
        for maskPlane in srcBadMaskPlanes:
            self.bitMask |= afwImage.Mask.getPlaneBitMask(maskPlane)
Example #38
    def makeDataRefList(self, namespace):
        """Make self.refList from self.idList
        """
        if self.datasetType is None:
            raise RuntimeError("Must call setDatasetType first")
        log = Log.getLogger(
            "meas.base.forcedPhotCcd.PerTractCcdDataIdContainer")
        skymap = None
        visitTract = collections.defaultdict(
            set)  # Set of tracts for each visit
        visitRefs = collections.defaultdict(
            list)  # List of data references for each visit
        for dataId in self.idList:
            if "tract" not in dataId:
                # Discover which tracts the data overlaps
                log.info(
                    "Reading WCS for components of dataId=%s to determine tracts",
                    dict(dataId))
                if skymap is None:
                    skymap = namespace.butler.get(namespace.config.coaddName +
                                                  "Coadd_skyMap")

                for ref in namespace.butler.subset("calexp", dataId=dataId):
                    if not ref.datasetExists("calexp"):
                        continue

                    visit = ref.dataId["visit"]
                    visitRefs[visit].append(ref)

                    md = ref.get("calexp_md", immediate=True)
                    wcs = lsst.afw.image.makeWcs(md)
                    box = lsst.afw.geom.Box2D(
                        lsst.afw.geom.Point2D(0, 0),
                        lsst.afw.geom.Point2D(md.get("NAXIS1"),
                                              md.get("NAXIS2")))
                    # Going with just the nearest tract.  Since we're throwing all tracts for the visit
                    # together, this shouldn't be a problem unless the tracts are much smaller than a CCD.
                    tract = skymap.findTract(wcs.pixelToSky(box.getCenter()))
                    if overlapsTract(tract, wcs, box):
                        visitTract[visit].add(tract.getId())
            else:
                self.refList.extend(ref for ref in namespace.butler.subset(
                    self.datasetType, dataId=dataId))

        # Ensure all components of a visit are kept together by putting them all in the same set of tracts
        for visit, tractSet in visitTract.items():
            for ref in visitRefs[visit]:
                for tract in tractSet:
                    self.refList.append(
                        namespace.butler.dataRef(datasetType=self.datasetType,
                                                 dataId=ref.dataId,
                                                 tract=tract))
        if visitTract:
            tractCounter = collections.Counter()
            for tractSet in visitTract.values():
                tractCounter.update(tractSet)
            log.info("Number of visits for each tract: %s", dict(tractCounter))
Example #39
    def testLog(self):
        """Test the Task's logger
        """
        addMultTask = AddMultTask()
        self.assertEqual(addMultTask.log.getName(), "addMult")
        self.assertEqual(addMultTask.add.log.getName(), "addMult.add")

        log = Log.getLogger("tester")
        addMultTask = AddMultTask(log=log)
        self.assertEqual(addMultTask.log.getName(), "tester.addMult")
        self.assertEqual(addMultTask.add.log.getName(), "tester.addMult.add")
Example #40
 def testOverrides(self):
     """Test config and log override
     """
     config = ExampleTask.ConfigClass()
     config.floatField = -99.9
     log = Log.getLogger("cmdLineTask")
     retVal = ExampleTask.parseAndRun(
         args=[DataPath, "--output", self.outPath, "--id", "visit=2"],
         config=config,
         log=log
     )
     self.assertEqual(retVal.parsedCmd.config.floatField, -99.9)
     self.assertIs(retVal.parsedCmd.log, log)
Example #41
def plot(mag, width, centers, clusterId, marker="o", markersize=2, markeredgewidth=0, ltype='-',
         magType="model", clear=True):

    log = Log.getLogger("objectSizeStarSelector.plot")
    try:
        import matplotlib.pyplot as plt
    except ImportError as e:
        log.warn("Unable to import matplotlib: %s", e)
        return

    try:
        fig
    except NameError:
        fig = plt.figure()
    else:
        if clear:
            fig.clf()

    axes = fig.add_axes((0.1, 0.1, 0.85, 0.80))

    xmin = sorted(mag)[int(0.05*len(mag))]
    xmax = sorted(mag)[int(0.95*len(mag))]

    axes.set_xlim(-17.5, -13)
    axes.set_xlim(xmin - 0.1*(xmax - xmin), xmax + 0.1*(xmax - xmin))
    axes.set_ylim(0, 10)

    colors = ["r", "g", "b", "c", "m", "k", ]
    for k, mean in enumerate(centers):
        if k == 0:
            axes.plot(axes.get_xlim(), (mean, mean,), "k%s" % ltype)

        li = (clusterId == k)
        axes.plot(mag[li], width[li], marker, markersize=markersize, markeredgewidth=markeredgewidth,
                  color=colors[k % len(colors)])

    li = (clusterId == -1)
    axes.plot(mag[li], width[li], marker, markersize=markersize, markeredgewidth=markeredgewidth,
              color='k')

    if clear:
        axes.set_xlabel("Instrumental %s mag" % magType)
        axes.set_ylabel(r"$\sqrt{(I_{xx} + I_{yy})/2}$")

    return fig
Example #42
    def makeDataRefList(self, namespace):
        """Make self.refList from self.idList
        """
        if self.datasetType is None:
            raise RuntimeError("Must call setDatasetType first")
        log = Log.getLogger("meas.base.forcedPhotCcd.PerTractCcdDataIdContainer")
        skymap = None
        visitTract = collections.defaultdict(set)   # Set of tracts for each visit
        visitRefs = collections.defaultdict(list)   # List of data references for each visit
        for dataId in self.idList:
            if "tract" not in dataId:
                # Discover which tracts the data overlaps
                log.info("Reading WCS for components of dataId=%s to determine tracts", dict(dataId))
                if skymap is None:
                    skymap = namespace.butler.get(namespace.config.coaddName + "Coadd_skyMap")

                for ref in namespace.butler.subset("calexp", dataId=dataId):
                    if not ref.datasetExists("calexp"):
                        continue

                    visit = ref.dataId["visit"]
                    visitRefs[visit].append(ref)

                    md = ref.get("calexp_md", immediate=True)
                    wcs = lsst.afw.geom.makeSkyWcs(md)
                    box = lsst.geom.Box2D(lsst.afw.image.bboxFromMetadata(md))
                    # Going with just the nearest tract.  Since we're throwing all tracts for the visit
                    # together, this shouldn't be a problem unless the tracts are much smaller than a CCD.
                    tract = skymap.findTract(wcs.pixelToSky(box.getCenter()))
                    if imageOverlapsTract(tract, wcs, box):
                        visitTract[visit].add(tract.getId())
            else:
                self.refList.extend(ref for ref in namespace.butler.subset(self.datasetType, dataId=dataId))

        # Ensure all components of a visit are kept together by putting them all in the same set of tracts
        for visit, tractSet in visitTract.items():
            for ref in visitRefs[visit]:
                for tract in tractSet:
                    self.refList.append(namespace.butler.dataRef(datasetType=self.datasetType,
                                                                 dataId=ref.dataId, tract=tract))
        if visitTract:
            tractCounter = collections.Counter()
            for tractSet in visitTract.values():
                tractCounter.update(tractSet)
            log.info("Number of visits for each tract: %s", dict(tractCounter))
Example #43
    def setUp(self):

        # Load sample input from disk
        testDir = os.path.dirname(__file__)
        self.srcCat = afwTable.SourceCatalog.readFits(
            os.path.join(testDir, "data", "v695833-e0-c000.xy.fits"))

        self.srcCat["slot_ApFlux_instFluxErr"] = 1
        self.srcCat["slot_PsfFlux_instFluxErr"] = 1

        # The .xy.fits file has sources in the range ~ [0,2000],[0,4500]
        # which is bigger than the exposure
        self.bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(2048, 4612))
        smallExposure = afwImage.ExposureF(os.path.join(testDir, "data", "v695833-e0-c000-a00.sci.fits"))
        self.exposure = afwImage.ExposureF(self.bbox)
        self.exposure.setWcs(smallExposure.getWcs())
        self.exposure.setFilter(smallExposure.getFilter())
        self.exposure.setPhotoCalib(smallExposure.getPhotoCalib())

        coordKey = self.srcCat.getCoordKey()
        centroidKey = self.srcCat.getCentroidKey()
        wcs = self.exposure.getWcs()
        for src in self.srcCat:
            src.set(coordKey, wcs.pixelToSky(src.get(centroidKey)))

        # Make a reference loader
        butler = Butler(RefCatDir)
        self.refObjLoader = LoadIndexedReferenceObjectsTask(butler=butler)
        logLevel = Log.TRACE
        self.log = Log.getLogger('testPhotoCal')
        self.log.setLevel(logLevel)

        self.config = PhotoCalConfig()
        self.config.match.matchRadius = 0.5
        self.config.match.referenceSelection.doMagLimit = True
        self.config.match.referenceSelection.magLimit.maximum = 22.0
        self.config.match.referenceSelection.magLimit.fluxField = "i_flux"
        self.config.match.referenceSelection.doFlags = True
        self.config.match.referenceSelection.flags.good = ['photometric']
        self.config.match.referenceSelection.flags.bad = ['resolved']
        self.config.match.sourceSelection.doUnresolved = False  # Don't have star/galaxy in the srcCat

        # The test and associated data have been prepared on the basis that we
        # use the PsfFlux to perform photometry.
        self.config.fluxField = "base_PsfFlux_instFlux"
Example #44
    def __init__(self, rerun=0, basedir='.', **kwargs):
        Mapper.__init__(self)

        print('TractorMapper(): ignoring kwargs', kwargs)

        self.basedir = basedir
        self.rerun = rerun
        self.log = Log.getLogger('TractorMapper')

        indir = os.path.join(self.basedir, 't%(visit)04i')
        outdir = os.path.join(indir, 'rr%(rerun)04i')
        self.filenames = {'outdir': (outdir, None, None),
                          'visitim': (os.path.join(indir, 't.fits'),  # 't_img.fits'), #img.fits'),
                                      'lsst.afw.image.ExposureF', 'ExposureF'),
                          'psf': (os.path.join(outdir, 'psf.boost'),
                                  'lsst.afw.detection.Psf', 'Psf'),
                          'src': (os.path.join(outdir, 'src.boost'),
                                  # dare to dream / keep dreaming
                                  # os.path.join(outdir, 'src.fits'),
                                  # htf did this work before?
                                  # 'lsst.afw.detection.Source', 'Source'),
                                  'lsst.afw.detection.PersistableSourceVector',
                                  'PersistableSourceVector'),
                          'bb': (os.path.join(outdir, 'bb.pickle'),
                                 None, None),
                          'pyfoots': (os.path.join(outdir, 'foots.pickle'),
                                      None, None),
                          'footprints': (os.path.join(outdir, 'foots.boost'),
                                         'lsst.afw.detection.FootprintList',
                                         'FootprintList'),
                          'truesrc': (os.path.join(indir, 'srcs.fits'),
                                      None, None),
                          }
        '''
        for datasetType in ["raw", "bias", "dark", "flat", "fringe",
            "postISR", "postISRCCD", "sdqaAmp", "sdqaCcd",
            "icSrc", "icMatch", "visitim", "psf", "apCorr", "calexp", "src",
            "sourceHist", "badSourceHist", "source", "badSource",
            "invalidSource", "object", "badObject"]:
            '''
        self.keys = ['visit', 'filter']
Example #45
    def testBasics(self):
        """!Test basic functionality of LinearizeSquared
        """
        for imageClass in (afwImage.ImageF, afwImage.ImageD):
            inImage = makeRampImage(bbox=self.bbox, start=-5, stop=2500, imageClass=imageClass)

            measImage = inImage.Factory(inImage, True)
            linSq = LinearizeSquared()
            linRes = linSq(image=measImage, detector=self.detector)
            desNumLinearized = np.sum(self.sqCoeffs.flatten() > 0)
            self.assertEqual(linRes.numLinearized, desNumLinearized)
            self.assertEqual(linRes.numAmps, len(self.detector.getAmpInfoCatalog()))

            refImage = inImage.Factory(inImage, True)
            refLinearizeSquared(image=refImage, detector=self.detector)

            self.assertImagesAlmostEqual(refImage, measImage)

            # make sure logging is accepted
            log = Log.getLogger("ip.isr.LinearizeSquared")
            linRes = linSq(image=measImage, detector=self.detector, log=log)
Example #46
def imageWriteFitsWithOptions(self, dest, options):
    """Write an Image or Mask to FITS, with options

    Parameters
    ----------
    dest : `str`
        Fits file path to which to write the image or mask.
    options : `lsst.daf.base.PropertySet`
        Write options. The item "image" is read. It must contain an
        `lsst.daf.base.PropertySet` with data for
        ``lsst.afw.fits.ImageWriteOptions``.
    """
    if options is not None:
        try:
            writeOptions = ImageWriteOptions(options.getPropertySet("image"))
        except Exception as e:
            log = Log.getLogger("lsst.afw.image")
            log.warn("Could not parse options; writing with defaults: {}".format(e))
        else:
            self.writeFits(dest, writeOptions)
            return
    self.writeFits(dest)
Example #47
    def testBasics(self):
        """!Test basic functionality of LinearizeLookupTable
        """
        for imageClass in (afwImage.ImageF, afwImage.ImageD):
            inImage = makeRampImage(bbox=self.bbox, start=-5, stop=250, imageClass=imageClass)
            table = self.makeTable(inImage)

            measImage = inImage.Factory(inImage, True)
            llt = LinearizeLookupTable(table=table, detector=self.detector)
            linRes = llt(measImage, self.detector)

            refImage = inImage.Factory(inImage, True)
            refNumOutOfRange = refLinearize(image=refImage, detector=self.detector, table=table)

            self.assertEqual(linRes.numAmps, len(self.detector.getAmpInfoCatalog()))
            self.assertEqual(linRes.numAmps, linRes.numLinearized)
            self.assertEqual(linRes.numOutOfRange, refNumOutOfRange)
            self.assertImagesAlmostEqual(refImage, measImage)

            # make sure logging is accepted
            log = Log.getLogger("ip.isr.LinearizeLookupTable")
            linRes = llt(image=measImage, detector=self.detector, log=log)
Example #48
def exposureWriteFitsWithOptions(self, dest, options):
    """Write an Exposure or MaskedImage to FITS, with options

    Parameters
    ----------
    dest : `str`
        Fits file path to which to write the exposure or masked image.
    options : `lsst.daf.base.PropertySet`
        Write options. The items "image", "mask" and "variance" are read.
        Each must be an `lsst.daf.base.PropertySet` with data for
        ``lsst.afw.fits.ImageWriteOptions``.
    """
    if options is not None:
        try:
            writeOptionDict = {name + "Options": ImageWriteOptions(options.getPropertySet(name))
                               for name in ("image", "mask", "variance")}
        except Exception as e:
            log = Log.getLogger("lsst.afw.image")
            log.warn("Could not parse options; writing with defaults: {}".format(e))
        else:
            self.writeFits(dest, **writeOptionDict)
            return
    self.writeFits(dest)
Example #49
def SafeLockedFileForRead(name):
    """Context manager for reading a file that may be locked with an exclusive lock via
    SafeLockedFileForWrite. This will first acquire a shared lock before returning the file. When the file is
    closed the shared lock will be unlocked.

    Parameters
    ----------
    name : string
        The file name to be opened, may include path.

    Yields
    ------
    file object
        The file to be read from.
    """
    log = Log.getLogger("daf.persistence.butler")
    try:
        with open(name, 'r') as f:
            log.debug("Acquiring shared lock on {}".format(name))
            fcntl.flock(f, fcntl.LOCK_SH)
            log.debug("Acquired shared lock on {}".format(name))
            yield f
    finally:
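        # The flock is released automatically when the file is closed on exit
        # from the "with" block; no explicit LOCK_UN call is needed.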
        log.debug("Releasing shared lock on {}".format(name))
Example No. 50
0
import time

import lsst.afw.math as afwMath
from lsst.log import Log


def backgroundSubtract(config, maskedImages):
    """Subtract the background from masked images.

    Parameters
    ----------
    config : TODO: DM-17458
        TODO: DM-17458
    maskedImages : `list` of `lsst.afw.image.MaskedImage`
        TODO: DM-17458

    Returns
    -------
    TODO: DM-17458
        TODO: DM-17458
    """
    backgrounds = []
    t0 = time.time()
    algorithm = config.algorithm
    binsize = config.binSize
    undersample = config.undersampleStyle
    bctrl = afwMath.BackgroundControl(algorithm)
    bctrl.setUndersampleStyle(undersample)
    for maskedImage in maskedImages:
        bctrl.setNxSample(maskedImage.getWidth()//binsize + 1)
        bctrl.setNySample(maskedImage.getHeight()//binsize + 1)
        image = maskedImage.getImage()
        backobj = afwMath.makeBackground(image, bctrl)

        image -= backobj.getImageF()
        backgrounds.append(backobj.getImageF())
        del backobj

    t1 = time.time()
    logger = Log.getLogger("ip.diffim.backgroundSubtract")
    logger.debug("Total time for background subtraction : %.2f s", (t1 - t0))
    return backgrounds
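
A hedged usage sketch: the config stub below is hypothetical and only provides the three attributes the function reads; "NATURAL_SPLINE" and "REDUCE_INTERP_ORDER" are standard lsst.afw.math style names:

from types import SimpleNamespace

import lsst.afw.image as afwImage

# Hypothetical stand-in for the (TODO-documented) config argument.
config = SimpleNamespace(algorithm="NATURAL_SPLINE", binSize=256,
                         undersampleStyle="REDUCE_INTERP_ORDER")
mi = afwImage.MaskedImageF(512, 512)
backgrounds = backgroundSubtract(config, [mi])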
Example No. 51
0

import lsst.utils.tests
import lsst.utils
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
from lsst.log import Log
import lsst.log.utils as logUtils
import lsst.meas.algorithms as measAlg
import lsst.ip.diffim as ipDiffim
import lsst.ip.diffim.diffimTools as diffimTools

verbosity = 4
logUtils.traceSetAt("ip.diffim", verbosity)
Log.getLogger('psfMatch').setLevel(Log.INFO)

display = False

# known input images
try:
    defDataDir = lsst.utils.getPackageDir('afwdata')
except Exception:
    defDataDir = None


class DiffimTestCases(lsst.utils.tests.TestCase):

    # D = I - (K.x.T + bg)

    def setUp(self):
Example No. 52
0
from lsst.meas.deblender.baseline import deblend
import lsst.meas.algorithms as measAlg

doPlot = False
if doPlot:
    import matplotlib
    matplotlib.use('Agg')
    import pylab as plt
    import os.path
    plotpat = os.path.join(os.path.dirname(__file__), 'stray%i.png')
    print('Writing plots to', plotpat)
else:
    print('"doPlot" not set -- not making plots.  To enable plots, edit', __file__)

# Lower the level to Log.DEBUG to see debug messages
Log.getLogger('meas.deblender.symmetrizeFootprint').setLevel(Log.INFO)


def imExt(img):
    bbox = img.getBBox()
    return [bbox.getMinX(), bbox.getMaxX(),
            bbox.getMinY(), bbox.getMaxY()]


def doubleGaussianPsf(W, H, fwhm1, fwhm2, a2):
    return measAlg.DoubleGaussianPsf(W, H, fwhm1, fwhm2, a2)


def gaussianPsf(W, H, fwhm):
    return measAlg.DoubleGaussianPsf(W, H, fwhm)
Example No. 53
0
import math

import lsst.geom
import lsst.afw.detection as afwDetection
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.afw.table as afwTable
from lsst.log import Log
import lsst.meas.base as measBase
import lsst.meas.algorithms as algorithms
import lsst.meas.algorithms.defects as defects
import lsst.pex.config as pexConfig
import lsst.utils.tests

# Change the level to Log.DEBUG or Log.TRACE to see debug messages
Log.getLogger("measurement").setLevel(Log.INFO)

try:
    type(display)
except NameError:
    display = False
else:
    import lsst.afw.display as afwDisplay
    afwDisplay.setDefaultMaskTransparency(75)

# Determine if we have afwdata
try:
    afwdataDir = lsst.utils.getPackageDir('afwdata')
except Exception:
    afwdataDir = None
Example No. 54
0
import numpy as np

from lsst.ip.diffim import diffimLib

sigma2fwhm = 2.0*np.sqrt(2.0*np.log(2.0))  # FWHM of a Gaussian = sigma2fwhm * sigma


def generateAlardLuptonBasisList(config, targetFwhmPix=None, referenceFwhmPix=None,
                                 basisDegGauss=None, metadata=None):
    """Generate an Alard-Lupton kernel basis based upon the Config and
    the input FWHM of the science and template images

    Parameters
    ----------
    config : TODO: DM-17458
        TODO: DM-17458
    targetFwhmPix : `float`, optional
        TODO: DM-17458
    referenceFwhmPix : `float`, optional
        TODO: DM-17458
    basisDegGauss : TODO: DM-17458, optional
        TODO: DM-17458
    metadata : TODO: DM-17458, optional
        TODO: DM-17458

    Returns
    -------
    TYPE
        TODO: DM-17458

    Raises
    ------
    RuntimeError
        TODO: DM-17458
    ValueError
        TODO: DM-17458
    """

    if config.kernelBasisSet != "alard-lupton":
        raise RuntimeError("Cannot generate %s basis within generateAlardLuptonBasisList" %
                           config.kernelBasisSet)

    kernelSize = config.kernelSize
    fwhmScaling = config.kernelSizeFwhmScaling
    basisNGauss = config.alardNGauss
    basisSigmaGauss = config.alardSigGauss
    basisGaussBeta = config.alardGaussBeta
    basisMinSigma = config.alardMinSig
    if basisDegGauss is None:
        basisDegGauss = config.alardDegGauss

    if len(basisDegGauss) != basisNGauss:
        raise ValueError("len(basisDegGauss) != basisNGauss : %d vs %d" % (len(basisDegGauss), basisNGauss))
    if len(basisSigmaGauss) != basisNGauss:
        raise ValueError("len(basisSigmaGauss) != basisNGauss : %d vs %d" %
                         (len(basisSigmaGauss), basisNGauss))
    if (kernelSize % 2) != 1:
        raise ValueError("Only odd-sized Alard-Lupton bases allowed")

    if (targetFwhmPix is None) or (referenceFwhmPix is None) or (not config.scaleByFwhm):
        if metadata is not None:
            metadata.add("ALBasisNGauss", basisNGauss)
            metadata.add("ALBasisDegGauss", basisDegGauss)
            metadata.add("ALBasisSigGauss", basisSigmaGauss)
            metadata.add("ALKernelSize", kernelSize)

        return diffimLib.makeAlardLuptonBasisList(kernelSize//2, basisNGauss, basisSigmaGauss, basisDegGauss)

    targetSigma = targetFwhmPix / sigma2fwhm
    referenceSigma = referenceFwhmPix / sigma2fwhm
    logger = Log.getLogger("lsst.ip.diffim.generateAlardLuptonBasisList")
    logger.debug("Generating matching bases for sigma %.2f pix -> %.2f pix", targetSigma, referenceSigma)

    # Modify the size of Alard-Lupton kernels based upon the images' FWHM
    #
    # Note the operation is : template.x.kernel = science
    #
    # Assuming the template and science image PSFs are Gaussians with
    # the FWHMs above, Fwhm_T**2 + Fwhm_K**2 = Fwhm_S**2
    #
    if targetSigma == referenceSigma:
        # Leave defaults as-is
        pass
    elif referenceSigma > targetSigma:
        # Normal convolution

        # First Gaussian has the sigma that comes from the convolution
        # of two Gaussians : Sig_S**2 = Sig_T**2 + Sig_K**2
        #
        # If it's larger than basisMinSigma * basisGaussBeta, make it the
        # second kernel.  Else make it the smallest kernel.  Unless
        # only 1 kernel is asked for.
        kernelSigma = np.sqrt(referenceSigma**2 - targetSigma**2)
        if kernelSigma < basisMinSigma:
            kernelSigma = basisMinSigma

        basisSigmaGauss = []
        if basisNGauss == 1:
            basisSigmaGauss.append(kernelSigma)
            nAppended = 1
        else:
            if (kernelSigma/basisGaussBeta) > basisMinSigma:
                basisSigmaGauss.append(kernelSigma/basisGaussBeta)
                basisSigmaGauss.append(kernelSigma)
                nAppended = 2
            else:
                basisSigmaGauss.append(kernelSigma)
                nAppended = 1

        # Any other Gaussians above basisNGauss=1 come from a scaling
        # relationship: Sig_i+1 / Sig_i = basisGaussBeta
        for i in range(nAppended, basisNGauss):
            basisSigmaGauss.append(basisSigmaGauss[-1]*basisGaussBeta)

        kernelSize = int(fwhmScaling * basisSigmaGauss[-1])
        kernelSize += 0 if kernelSize%2 else 1  # Make sure it's odd
        kernelSize = min(config.kernelSizeMax, max(kernelSize, config.kernelSizeMin))

    else:
        # Deconvolution; Define the progression of Gaussians using a
        # method to derive a deconvolution sum-of-Gaussians from its
        # convolution counterpart.  Only use 3 since the algorithm
        # assumes 3 components.
        #
        # http://iopscience.iop.org/0266-5611/26/8/085002  Equation 40

        # Use specializations for deconvolution
        basisNGauss = config.alardNGaussDeconv
        basisMinSigma = config.alardMinSigDeconv

        kernelSigma = np.sqrt(targetSigma**2 - referenceSigma**2)
        if kernelSigma < basisMinSigma:
            kernelSigma = basisMinSigma

        basisSigmaGauss = []
        if (kernelSigma/basisGaussBeta) > basisMinSigma:
            basisSigmaGauss.append(kernelSigma/basisGaussBeta)
            basisSigmaGauss.append(kernelSigma)
            nAppended = 2
        else:
            basisSigmaGauss.append(kernelSigma)
            nAppended = 1

        for i in range(nAppended, basisNGauss):
            basisSigmaGauss.append(basisSigmaGauss[-1]*basisGaussBeta)

        kernelSize = int(fwhmScaling * basisSigmaGauss[-1])
        kernelSize += 0 if kernelSize%2 else 1  # Make sure it's odd
        kernelSize = min(config.kernelSizeMax, max(kernelSize, config.kernelSizeMin))

        # Now build a deconvolution set from these sigmas
        sig0 = basisSigmaGauss[0]
        sig1 = basisSigmaGauss[1]
        sig2 = basisSigmaGauss[2]
        basisSigmaGauss = []
        for n in range(1, 3):
            for j in range(n):
                sigma2jn = (n - j)*sig1**2
                sigma2jn += j * sig2**2
                sigma2jn -= (n + 1)*sig0**2
                sigmajn = np.sqrt(sigma2jn)
                basisSigmaGauss.append(sigmajn)

        basisSigmaGauss.sort()
        basisNGauss = len(basisSigmaGauss)
        basisDegGauss = [config.alardDegGaussDeconv for x in basisSigmaGauss]

    if metadata is not None:
        metadata.add("ALBasisNGauss", basisNGauss)
        metadata.add("ALBasisDegGauss", basisDegGauss)
        metadata.add("ALBasisSigGauss", basisSigmaGauss)
        metadata.add("ALKernelSize", kernelSize)

    return diffimLib.makeAlardLuptonBasisList(kernelSize//2, basisNGauss, basisSigmaGauss, basisDegGauss)
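
A worked sketch of the kernel-width arithmetic above, using illustrative FWHM values that are not from the snippet:

import numpy as np

sigma2fwhm = 2.0*np.sqrt(2.0*np.log(2.0))  # ~2.3548

# Illustrative: template (target) FWHM 3.0 pix, science (reference) FWHM 4.0 pix.
targetSigma = 3.0 / sigma2fwhm      # ~1.274 pix
referenceSigma = 4.0 / sigma2fwhm   # ~1.699 pix

# Normal-convolution case: Sig_S**2 = Sig_T**2 + Sig_K**2, so
kernelSigma = np.sqrt(referenceSigma**2 - targetSigma**2)
print("%.3f" % kernelSigma)  # ~1.123 pix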
Example No. 55
0
import math
import unittest

import numpy as np

import lsst.utils.tests
import lsst.geom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.afw.math.detail as mathDetail
from lsst.log import Log

# Change the level to Log.DEBUG to see debug messages
Log.getLogger("TRACE5.afw.math.convolve").setLevel(Log.INFO)

LocNameDict = {
    mathDetail.KernelImagesForRegion.BOTTOM_LEFT: "BOTTOM_LEFT",
    mathDetail.KernelImagesForRegion.BOTTOM_RIGHT: "BOTTOM_RIGHT",
    mathDetail.KernelImagesForRegion.TOP_LEFT: "TOP_LEFT",
    mathDetail.KernelImagesForRegion.TOP_RIGHT: "TOP_RIGHT",
}

NameLocDict = dict((name, loc) for (loc, name) in LocNameDict.items())


class KernelImagesForRegion(lsst.utils.tests.TestCase):

    def setUp(self):
        boxCorner = lsst.geom.Point2I(11, 50)
Example No. 56
0
"""
import os
import unittest

import lsst.utils
import lsst.utils.tests
import lsst.geom
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.image.utils as imageUtils
import lsst.afw.math as afwMath
import lsst.pex.exceptions as pexExcept
from lsst.log import Log

# Change the level to Log.DEBUG to see debug messages
Log.getLogger("afw.image.Mask").setLevel(Log.INFO)
Log.getLogger("TRACE3.afw.math.warp").setLevel(Log.INFO)
Log.getLogger("TRACE4.afw.math.warp").setLevel(Log.INFO)

try:
    afwdataDir = lsst.utils.getPackageDir("afwdata")
except pexExcept.NotFoundError:
    afwdataDir = None
    dataDir = None
else:
    dataDir = os.path.join(afwdataDir, "data")
    originalExposureName = "medexp.fits"
    originalExposurePath = os.path.join(dataDir, originalExposureName)
    subExposureName = "medsub.fits"
    subExposurePath = os.path.join(dataDir, subExposureName)
    originalFullExposureName = os.path.join(
Example No. 57
0
import os

from lsst.daf.persistence import Butler
from lsst.meas.algorithms import LoadIndexedReferenceObjectsTask
import lsst.afw.geom as afwGeom
import lsst.afw.table as afwTable
import lsst.afw.image as afwImage
import lsst.utils.tests
from lsst.utils import getPackageDir
from lsst.log import Log
from lsst.pipe.tasks.photoCal import PhotoCalTask, PhotoCalConfig
from lsst.pipe.tasks.colorterms import Colorterm, ColortermDict, ColortermLibrary

RefCatDir = os.path.join(getPackageDir("pipe_tasks"), "tests", "data", "sdssrefcat")

# Quiet down meas_astrom logging, so we can see PhotoCal logs better
Log.getLogger("LoadIndexedReferenceObjectsTask").setLevel(Log.WARN)

testColorterms = ColortermLibrary(data={
    "test*": ColortermDict(data={
        "g": Colorterm(primary="g", secondary="r", c0=0.00, c1=0.00),
        "r": Colorterm(primary="r", secondary="i", c0=0.00, c1=0.00, c2=0.00),
        "i": Colorterm(primary="i", secondary="z", c0=1.00, c1=0.00, c2=0.00),
        "z": Colorterm(primary="z", secondary="i", c0=0.00, c1=0.00, c2=0.00),
    })
})
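
For illustration, a hedged sketch of looking up one term from this library; getColorterm takes a filter name plus a photometric catalog name matched against the "test*" glob, and the catalog name below is hypothetical:

ct = testColorterms.getColorterm("i", photoCatName="test_catalog")
print(ct.primary, ct.secondary, ct.c0)  # -> i z 1.0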


def setup_module(module):
    lsst.utils.tests.init()

Example No. 58
0
    def readSrc(self, dataRef):
        """Read source catalog etc for input dataRef

        The following are returned:
        the source catalog, matched list, and WCS, read from 'src', 'srcMatch', and
        'calexp_md', respectively.

        NOTE: If the detector has nQuarter%4 != 0 (i.e. it is rotated w.r.t. the focal plane
              coordinate system), the (x, y) pixel values of the centroid slot for the source
              catalogs are rotated such that pixel (0, 0) is the LLC (i.e. the coordinate system
              expected by meas_mosaic).

        If color transformation information is given, it will be applied to the reference flux
        of the matched list.  The source catalog and matched list will be converted to measMosaic's
        Source and SourceMatch and returned.

        The number of 'Source's in each cell defined by config.cellSize will be limited to the
        brightest config.nStarPerCell sources.
        """

        self.log = Log.getDefaultLogger()

        dataId = dataRef.dataId

        try:
            if not dataRef.datasetExists("src"):
                raise RuntimeError("no data for src %s" % (dataId))
            if not dataRef.datasetExists("calexp_md"):
                raise RuntimeError("no data for calexp_md %s" % (dataId))

            calexp_md = dataRef.get("calexp_md", immediate=True)
            detector = dataRef.get("camera")[dataRef.dataId["ccd"]]  # OK for HSC; maybe not for other cameras
            wcs = afwGeom.makeSkyWcs(calexp_md)
            nQuarter = detector.getOrientation().getNQuarter()
            sources = dataRef.get("src", immediate=True, flags=afwTable.SOURCE_IO_NO_FOOTPRINTS)

            # Check if we are looking at HSC stack outputs: if so, no pixel rotation of sources is
            # required, but alias mapping must be set to associate HSC's schema with that of LSST.
            hscRun = mosaicUtils.checkHscStack(calexp_md)
            if hscRun is None:
                if nQuarter%4 != 0:
                    dims = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                    sources = mosaicUtils.rotatePixelCoords(sources, dims.getX(), dims.getY(),
                                                            nQuarter)

            # Set some alias maps for the source catalog where needed for
            # backwards compatibility
            if self.config.srcSchemaMap and hscRun:
                aliasMap = sources.schema.getAliasMap()
                for lsstName, otherName in self.config.srcSchemaMap.items():
                    aliasMap.set(lsstName, otherName)
            if self.config.flagsToAlias and "calib_psfUsed" in sources.schema:
                aliasMap = sources.schema.getAliasMap()
                for lsstName, otherName in self.config.flagsToAlias.items():
                    aliasMap.set(lsstName, otherName)

            refObjLoader = self.config.loadAstrom.apply(butler=dataRef.getButler())
            srcMatch = dataRef.get("srcMatch", immediate=True)
            if hscRun is not None:
                # The reference object loader grows the bbox by the config parameter pixelMargin.  This
                # is set to 50 by default but is not reflected by the radius parameter set in the
                # metadata, so some matches may reside outside the circle searched within this radius.
                # Thus, increase the radius set in the metadata fed into joinMatchListWithCatalog() to
                # accommodate.
                matchmeta = srcMatch.table.getMetadata()
                rad = matchmeta.getDouble("RADIUS")
                matchmeta.setDouble("RADIUS", rad*1.05, "field radius in degrees, approximate, padded")
            matches = refObjLoader.joinMatchListWithCatalog(srcMatch, sources)

            # Set the alias map for the matched sources (i.e. the [1] attribute schema for each match)
            if self.config.srcSchemaMap is not None and hscRun is not None:
                for mm in matches:
                    aliasMap = mm[1].schema.getAliasMap()
                    for lsstName, otherName in self.config.srcSchemaMap.items():
                        aliasMap.set(lsstName, otherName)

            if hscRun is not None:
                for slot in ("PsfFlux", "ModelFlux", "ApFlux", "GaussianFlux", "Centroid", "Shape"):
                    getattr(matches[0][1].getTable(), "define" + slot)(
                        getattr(sources, "get" + slot + "Definition")())
                    # For some reason, the CalibFlux slot in sources is coming up as centroid_sdss, so
                    # set it to flux_naive explicitly
                    for slot in ("CalibFlux", ):
                        getattr(matches[0][1].getTable(), "define" + slot)("flux_naive")
            matches = [m for m in matches if m[0] is not None]
            refSchema = matches[0][0].schema if matches else None

            if self.cterm is not None and len(matches) != 0:
                # Add a "flux" field to the input schema of the first element
                # of the match and populate it with a colorterm correct flux.
                mapper = afwTable.SchemaMapper(refSchema)
                for key, field in refSchema:
                    mapper.addMapping(key)
                fluxKey = mapper.editOutputSchema().addField("flux", type=float, doc="Reference flux")
                fluxErrKey = mapper.editOutputSchema().addField("fluxErr", type=float,
                                                                doc="Reference flux uncertainty")
                table = afwTable.SimpleTable.make(mapper.getOutputSchema())
                table.preallocate(len(matches))
                for match in matches:
                    newMatch = table.makeRecord()
                    newMatch.assign(match[0], mapper)
                    match[0] = newMatch

                # extract the matched refCat as a Catalog for the colorterm code
                refCat = afwTable.SimpleCatalog(matches[0].first.schema)
                refCat.reserve(len(matches))
                for x in matches:
                    record = refCat.addNew()
                    record.assign(x.first)

                refMag, refMagErr = self.cterm.getCorrectedMagnitudes(refCat,
                                                                      afwImage.Filter(calexp_md).getName())
                # NOTE: mosaic assumes fluxes are in Jy
                refFlux = (refMag*astropy.units.ABmag).to_value(astropy.units.Jy)
                refFluxErr = afwImage.fluxErrFromABMagErr(refMagErr, refMag)
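                # Note: "flux == flux" below is False for NaN, so entries with a
                # NaN reference flux are dropped.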
                matches = [self.setCatFlux(m, flux, fluxKey, fluxErr, fluxErrKey) for
                           m, flux, fluxErr in zip(matches, refFlux, refFluxErr) if flux == flux]
            else:
                filterName = afwImage.Filter(calexp_md).getName()
                refFluxField = measAlg.getRefFluxField(refSchema, filterName)
                refSchema.getAliasMap().set("flux", refFluxField)

            # LSST reads in astrometry.net catalogs with flux in "janskys", so must convert back to DN.
            matches = mosaicUtils.matchJanskyToDn(matches)

            selSources = self.selectStars(sources, self.config.includeSaturated)
            selMatches = self.selectStars(matches, self.config.includeSaturated)

            retSrc = list()
            retMatch = list()

            if len(selMatches) > self.config.minNumMatch:
                naxis1, naxis2 = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                if hscRun is None:
                    if nQuarter%2 != 0:
                        naxis1, naxis2 = naxis2, naxis1
                bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(naxis1, naxis2))
                cellSet = afwMath.SpatialCellSet(bbox, self.config.cellSize, self.config.cellSize)
                for s in selSources:
                    if numpy.isfinite(s.getRa().asDegrees()): # get rid of NaN
                        src = measMosaic.Source(s)
                        src.setExp(dataId["visit"])
                        src.setChip(dataId["ccd"])
                        try:
                            tmp = measMosaic.SpatialCellSource(src)
                            cellSet.insertCandidate(tmp)
                        except Exception:
                            self.log.info("FAILED TO INSERT CANDIDATE: visit=%d ccd=%d x=%f y=%f" %
                                          (dataRef.dataId["visit"], dataRef.dataId["ccd"],
                                           src.getX(), src.getY()) + " bbox=" + str(bbox))
                for cell in cellSet.getCellList():
                    cell.sortCandidates()
                    for i, cand in enumerate(cell):
                        src = cand.getSource()
                        retSrc.append(src)
                        if i == self.config.nStarPerCell - 1:
                            break
                for m in selMatches:
                    if m[0] is not None and m[1] is not None:
                        match = (measMosaic.Source(m[0], wcs), measMosaic.Source(m[1]))
                        match[1].setExp(dataId["visit"])
                        match[1].setChip(dataId["ccd"])
                        retMatch.append(match)
            else:
                self.log.info("%8d %3d : %d/%d matches  Suspicious to wrong match. Ignore this CCD" %
                              (dataRef.dataId["visit"], dataRef.dataId["ccd"], len(selMatches), len(matches)))

        except Exception as e:
            self.log.warn("Failed to read %s: %s" % (dataId, e))
            return dataId, [None, None, None]

        return dataId, [retSrc, retMatch, wcs]
Example No. 59
0
import os

import lsst.utils
import lsst.utils.tests
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.afw.display.ds9 as ds9
import lsst.pex.exceptions as pexExcept
import lsst.coadd.utils as coaddUtils
from lsst.log import Log

try:
    display
except NameError:
    display = False

Log.getLogger("coadd.utils").setLevel(Log.INFO)

try:
    AfwdataDir = lsst.utils.getPackageDir('afwdata')
except Exception:
    AfwdataDir = None
# path to a medium-sized MaskedImage, relative to afwdata package root
MedMiSubpath = os.path.join("data", "med.fits")


def slicesFromBox(box, image):
    """Computes the numpy slice in x and y associated with a parent bounding box
    given an image/maskedImage/exposure
    """
    startInd = (box.getMinX() - image.getX0(), box.getMinY() - image.getY0())
    stopInd = (startInd[0] + box.getWidth(), startInd[1] + box.getHeight())