Example #1
    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for those
        components directly rather than trying to extract them from the
        metadata manually, which is fragile.  This behavior is an intentional
        change from Gen2.

        Notes
        -----
        Operates on ``self.metadata`` (an `~lsst.daf.base.PropertyList`),
        which is modified in place.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs

        # Protect against the relevant header keywords being missing or unparseable
        try:
            bboxFromMetadata(self.metadata)  # always strips
        except LookupError:
            pass
        try:
            makeSkyWcs(self.metadata, strip=True)
        except Exception:
            pass
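
A minimal sketch of what the stripping above does to a header-like PropertyList (the keyword values are made up and a working LSST stack is assumed): bboxFromMetadata strips the size keywords it reads, and makeSkyWcs(..., strip=True) strips the WCS cards it consumed.

# Hedged sketch; header values are illustrative, not from any real exposure.
from lsst.daf.base import PropertyList
from lsst.afw.geom import makeSkyWcs
from lsst.afw.image import bboxFromMetadata

md = PropertyList()
# Image size and a simple TAN WCS, as they would appear in a FITS header.
md.set("NAXIS1", 2048)
md.set("NAXIS2", 4096)
md.set("CTYPE1", "RA---TAN")
md.set("CTYPE2", "DEC--TAN")
md.set("CRPIX1", 1024.0)
md.set("CRPIX2", 2048.0)
md.set("CRVAL1", 150.0)
md.set("CRVAL2", 2.5)
md.set("CD1_1", -5.5e-5)
md.set("CD1_2", 0.0)
md.set("CD2_1", 0.0)
md.set("CD2_2", 5.5e-5)

bbox = bboxFromMetadata(md)       # reads (and strips) the size keywords
wcs = makeSkyWcs(md, strip=True)  # reads (and strips) the WCS cards
print(bbox.getDimensions(), wcs.getSkyOrigin())
print(md.names())                 # the consumed cards are no longer present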
Example #2
    def writeNewWcs(self, dataRefList):
        self.log.info("Write New WCS ...")
        for dataRef in dataRefList:
            iexp = dataRef.dataId["visit"]
            ichip = dataRef.dataId["ccd"]
            c = measMosaic.convertCoeff(self.coeffSet[iexp],
                                        self.ccdSet[ichip])
            wcs = measMosaic.wcsFromCoeff(c)
            calexp_md = dataRef.get("calexp_md", immediate=True)
            hscRun = mosaicUtils.checkHscStack(calexp_md)
            if hscRun is None:
                detector = dataRef.get("camera")[dataRef.dataId["ccd"]]
                nQuarter = detector.getOrientation().getNQuarter()
                if nQuarter % 4 != 0:
                    dimensions = afwImage.bboxFromMetadata(
                        calexp_md).getDimensions()
                    if nQuarter % 2 != 0:
                        dimensions = afwGeom.Extent2I(dimensions.getY(),
                                                      dimensions.getX())
                    wcs = measAstrom.rotateWcsPixelsBy90(
                        wcs, 4 - nQuarter, dimensions)

            try:
                dataRef.put(wcs, "jointcal_wcs")
            except Exception as e:
                print("failed to write wcs: %s" % (e))
Example #3
 def _createSkyWcsFromMetadata(self):
     # We need to know which direction the chip is "flipped" in order to
     # make a sensible WCS from the header metadata.
     wcs = makeSkyWcs(self.metadata, strip=True)
     dimensions = bboxFromMetadata(self.metadata).getDimensions()
     center = Point2D(dimensions/2.0)
     return wcs #makeFlippedWcs(wcs, self.FLIP_LR, self.FLIP_TB, center)
 def makeDataRefList(self, namespace):
     """Add a dataList containing useful information for selecting images"""
     super(SelectDataIdContainer, self).makeDataRefList(namespace)
     self.dataList = []
     for ref in self.refList:
         try:
             md = ref.get("calexp_md", immediate=True)
             wcs = afwGeom.makeSkyWcs(md)
             data = SelectStruct(dataRef=ref, wcs=wcs, bbox=afwImage.bboxFromMetadata(md))
         except FitsError:
             namespace.log.warn("Unable to construct Wcs from %s" % (ref.dataId))
             continue
         self.dataList.append(data)
Example #5
 def makeDataRefList(self, namespace):
     """Add a dataList containing useful information for selecting images"""
     super(SelectDataIdContainer, self).makeDataRefList(namespace)
     self.dataList = []
     for ref in self.refList:
         try:
             md = ref.get("calexp_md", immediate=True)
             wcs = afwGeom.makeSkyWcs(md)
             data = SelectStruct(dataRef=ref, wcs=wcs, bbox=afwImage.bboxFromMetadata(md))
         except FitsError as e:
             namespace.log.warn("Unable to construct Wcs from %s" % (ref.dataId))
             continue
         self.dataList.append(data)
Example #6
 def insertObservationRegions(self, registry, datastore):
     """Add spatial regions for visit-detector combinations.
     """
     sql = (
         "SELECT wcs.instrument AS instrument, wcs.visit AS visit, wcs.detector AS detector, "
         "        wcs.dataset_id AS wcs, metadata.dataset_id AS metadata "
         "    FROM dataset wcs "
         "        INNER JOIN dataset_collection wcs_collection "
         "            ON (wcs.dataset_id = wcs_collection.dataset_id) "
         "        INNER JOIN dataset metadata "
         "            ON (wcs.instrument = metadata.instrument "
         "                AND wcs.visit = metadata.visit "
         "                AND wcs.detector = metadata.detector) "
         "        INNER JOIN dataset_collection metadata_collection "
         "            ON (metadata.dataset_id = metadata_collection.dataset_id) "
         "    WHERE wcs_collection.collection = :collection "
         "          AND metadata_collection.collection = :collection "
         "          AND wcs.dataset_type_name = :wcs_name"
         "          AND metadata.dataset_type_name = :metadata_name")
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for config in self.config["regions"]:
         log.info("Adding observation regions using %s from %s.",
                  config["DatasetType"], config["collection"])
         visits = {}
         for row in registry.query(
                 sql,
                 collection=config["collection"],
                 wcs_name="{}.wcs".format(config["DatasetType"]),
                 metadata_name="{}.metadata".format(config["DatasetType"])):
             wcsRef = registry.getDataset(row["wcs"])
             metadataRef = registry.getDataset(row["metadata"])
             wcs = datastore.get(wcsRef)
             metadata = datastore.get(metadataRef)
             bbox = Box2D(bboxFromMetadata(metadata))
             bbox.grow(config["padding"])
             region = ConvexPolygon([
                 sp.getVector() for sp in wcs.pixelToSky(bbox.getCorners())
             ])
             registry.setDimensionRegion(
                 {k: row[k]
                  for k in ("instrument", "visit", "detector")},
                 region=region,
                 update=False)
             visits.setdefault((row["instrument"], row["visit"]),
                               []).extend(region.getVertices())
         for (instrument, visit), vertices in visits.items():
             region = ConvexPolygon(vertices)
             registry.setDimensionRegion(instrument=instrument,
                                         visit=visit,
                                         region=region)
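
The geometric core of insertObservationRegions is independent of the registry plumbing: pad the pixel bounding box and turn its corners into a spherical convex polygon. Factored out, it might look like the sketch below (regionFromMetadata is a hypothetical helper; on newer stacks Box2D lives in lsst.geom rather than lsst.afw.geom).

from lsst.afw.geom import Box2D
from lsst.afw.image import bboxFromMetadata
from lsst.sphgeom import ConvexPolygon


def regionFromMetadata(metadata, wcs, padding=0):
    """Return a ConvexPolygon covering the image described by ``metadata``,
    grown by ``padding`` pixels, with ``wcs`` mapping pixels to the sky."""
    bbox = Box2D(bboxFromMetadata(metadata))
    bbox.grow(padding)
    # pixelToSky accepts the list of corner points and returns SpherePoints,
    # whose unit vectors define the polygon, as in the loop above.
    return ConvexPolygon([sp.getVector() for sp in wcs.pixelToSky(bbox.getCorners())])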
Example #7
    def checkOverlapWithTract(self, tractInfo, dataRefList, verbose=False):
        dataRefListExists = list()
        dataRefListOverlapWithTract = list()
        tractBBox = afwGeom.Box2D(tractInfo.getBBox())
        tractWcs = tractInfo.getWcs()
        for dataRef in dataRefList:
            try:
                if not dataRef.datasetExists("calexp_md"):
                    raise RuntimeError("no data for calexp_md %s" %
                                       (dataRef.dataId))
                md = dataRef.get("calexp_md", immediate=True)
                wcs = afwGeom.makeSkyWcs(md)

                dataRefListExists.append(dataRef)

                if self.config.requireTractOverlap:
                    naxis1, naxis2 = afwImage.bboxFromMetadata(
                        md).getDimensions()
                    bbox = afwGeom.Box2D(
                        afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                      afwGeom.Extent2I(naxis1, naxis2)))
                    overlap = False
                    for corner in bbox.getCorners():
                        if tractBBox.contains(
                                tractWcs.skyToPixel(wcs.pixelToSky(corner))):
                            overlap = True
                            break
                    if overlap:
                        dataRefListOverlapWithTract.append(dataRef)
                    else:  # when there's no break i.e. no corner was in the tract
                        if verbose:
                            self.log.warn(
                                "Image %s does not overlap tract %s" %
                                (dataRef.dataId, tractInfo.getId()))
                else:
                    dataRefListOverlapWithTract.append(dataRef)
            except Exception as e:
                print(e)

        visitListOverlapWithTract = list(
            set([d.dataId["visit"] for d in dataRefListOverlapWithTract]))

        dataRefListToUse = list()
        for dataRef in dataRefListExists:
            if dataRef.dataId["visit"] in visitListOverlapWithTract:
                dataRefListToUse.append(dataRef)

        return dataRefListOverlapWithTract, dataRefListToUse
    def std_raw_md(self, md, dataId):
        if False:            # no std_raw_md in baseclass
            md = super(HscMapper, self).std_raw_md(md, dataId)
        #
        # We need to flip the WCS defined by the metadata in case anyone ever constructs a Wcs from it
        #
        wcs = afwGeom.makeSkyWcs(md)
        wcs = self._flipChipsLR(None, wcs, dataId, dims=afwImage.bboxFromMetadata(md).getDimensions())[1]
        wcsR = afwGeom.makeSkyWcs(crpix=wcs.getPixelOrigin(),
                                  crval=wcs.getSkyOrigin(),
                                  cdMatrix=wcs.getCdMatrix()*0.992)
        wcsMd = wcsR.getFitsMetadata()

        for k in wcsMd.names():
            md.set(k, wcsMd.getScalar(k))

        return md
Example #9
    def std_raw_md(self, md, dataId):
        """We need to flip the WCS defined by the metadata in case anyone ever
        constructs a Wcs from it.
        """
        wcs = afwGeom.makeSkyWcs(md)
        wcs = self._flipChipsLR(None, wcs, dataId['ccd'],
                                dims=afwImage.bboxFromMetadata(md).getDimensions())[1]
        # NOTE: we don't know where the 0.992 magic constant came from. It was copied over from hscSimMapper.
        wcsR = afwGeom.makeSkyWcs(crpix=wcs.getPixelOrigin(),
                                  crval=wcs.getSkyOrigin(),
                                  cdMatrix=wcs.getCdMatrix()*0.992)
        wcsMd = wcsR.getFitsMetadata()

        for k in wcsMd.names():
            md.set(k, wcsMd.getScalar(k))

        return md
    def checkOverlapWithTract(self, tractInfo, dataRefList, verbose=False):
        dataRefListExists = list()
        dataRefListOverlapWithTract = list()
        tractBBox = afwGeom.Box2D(tractInfo.getBBox())
        tractWcs = tractInfo.getWcs()
        for dataRef in dataRefList:
            try:
                if not dataRef.datasetExists("calexp_md"):
                    raise RuntimeError("no data for calexp_md %s" % (dataRef.dataId))
                md = dataRef.get("calexp_md", immediate=True)
                wcs = afwGeom.makeSkyWcs(md)

                dataRefListExists.append(dataRef)

                if self.config.requireTractOverlap:
                    naxis1, naxis2 = afwImage.bboxFromMetadata(md).getDimensions()
                    bbox = afwGeom.Box2D(afwGeom.Box2I(
                            afwGeom.Point2I(0, 0), afwGeom.Extent2I(naxis1, naxis2)))
                    overlap = False
                    for corner in bbox.getCorners():
                        if tractBBox.contains(tractWcs.skyToPixel(wcs.pixelToSky(corner))):
                            overlap = True
                            break
                    if overlap:
                        dataRefListOverlapWithTract.append(dataRef)
                    else:  # when there's no break i.e. no corner was in the tract
                        if verbose:
                            self.log.warn("Image %s does not overlap tract %s" %
                                          (dataRef.dataId, tractInfo.getId()))
                else:
                    dataRefListOverlapWithTract.append(dataRef)
            except Exception as e:
                print(e)

        visitListOverlapWithTract = list(set([d.dataId["visit"] for d in dataRefListOverlapWithTract]))

        dataRefListToUse = list()
        for dataRef in dataRefListExists:
            if dataRef.dataId["visit"] in visitListOverlapWithTract:
                dataRefListToUse.append(dataRef)

        return dataRefListOverlapWithTract, dataRefListToUse
    def readSelection(self, cache, selectId):
        """!Read Wcs of selected inputs

        This method only runs on slave nodes.
        This method is similar to SelectDataIdContainer.makeDataRefList,
        creating a Struct like a SelectStruct, except with a dataId instead
        of a dataRef (to ease MPI).

        @param cache: Pool cache
        @param selectId: Data identifier for selected input
        @return a SelectStruct with a dataId instead of dataRef
        """
        try:
            ref = getDataRef(cache.butler, selectId, "calexp")
            self.log.info("Reading Wcs from %s" % (selectId,))
            md = ref.get("calexp_md", immediate=True)
            wcs = afwGeom.makeSkyWcs(md)
            data = Struct(dataId=selectId, wcs=wcs, bbox=afwImage.bboxFromMetadata(md))
        except FitsError:
            self.log.warn("Unable to construct Wcs from %s" % (selectId,))
            return None
        return data
    def writeNewWcs(self, dataRefList):
        self.log.info("Write New WCS ...")
        for dataRef in dataRefList:
            iexp = dataRef.dataId["visit"]
            ichip = dataRef.dataId["ccd"]
            c = measMosaic.convertCoeff(self.coeffSet[iexp], self.ccdSet[ichip])
            wcs = measMosaic.wcsFromCoeff(c)
            calexp_md = dataRef.get("calexp_md", immediate=True)
            hscRun = mosaicUtils.checkHscStack(calexp_md)
            if hscRun is None:
                detector = dataRef.get("camera")[dataRef.dataId["ccd"]]
                nQuarter = detector.getOrientation().getNQuarter()
                if nQuarter%4 != 0:
                    dimensions = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                    if nQuarter%2 != 0:
                        dimensions = afwGeom.Extent2I(dimensions.getY(), dimensions.getX())
                    wcs = measAstrom.rotateWcsPixelsBy90(wcs, 4 - nQuarter, dimensions)

            try:
                dataRef.put(wcs, "jointcal_wcs")
            except Exception as e:
                print("failed to write wcs: %s" % (e))
Example #13
    def readSrc(self, dataRef):
        """Read source catalog etc for input dataRef

        The source catalog, matched list, and wcs are read from 'src', 'srcMatch', and 'calexp_md',
        respectively.

        NOTE: If the detector has nQuarter%4 != 0 (i.e. it is rotated w.r.t the focal plane
              coordinate system), the (x, y) pixel values of the centroid slot for the source
              catalogs are rotated such that pixel (0, 0) is the LLC (i.e. the coordinate system
              expected by meas_mosaic).

        If color transformation information is given, it will be applied to the reference flux
        of the matched list.  The source catalog and matched list will be converted to measMosaic's
        Source and SourceMatch and returned.

        The number of 'Source's in each cell defined by config.cellSize will be limited to the
        brightest config.nStarPerCell.
        """

        self.log = Log.getDefaultLogger()

        dataId = dataRef.dataId

        try:
            if not dataRef.datasetExists("src"):
                raise RuntimeError("no data for src %s" % (dataId))
            if not dataRef.datasetExists("calexp_md"):
                raise RuntimeError("no data for calexp_md %s" % (dataId))

            calexp_md = dataRef.get("calexp_md", immediate=True)
            # OK for HSC; maybe not for other cameras
            detector = dataRef.get("camera")[dataRef.dataId["ccd"]]
            wcs = afwGeom.makeSkyWcs(calexp_md)
            nQuarter = detector.getOrientation().getNQuarter()
            sources = dataRef.get("src",
                                  immediate=True,
                                  flags=afwTable.SOURCE_IO_NO_FOOTPRINTS)

            # Check if we are looking at HSC stack outputs: if so, no pixel rotation of sources is
            # required, but alias mapping must be set to associate HSC's schema with that of LSST.
            hscRun = mosaicUtils.checkHscStack(calexp_md)
            if hscRun is None:
                if nQuarter % 4 != 0:
                    dims = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                    sources = mosaicUtils.rotatePixelCoords(
                        sources, dims.getX(), dims.getY(), nQuarter)

            # Set the alias map for the source catalog
            if self.config.srcSchemaMap is not None and hscRun is not None:
                aliasMap = sources.schema.getAliasMap()
                for lsstName, otherName in self.config.srcSchemaMap.items():
                    aliasMap.set(lsstName, otherName)

            refObjLoader = self.config.loadAstrom.apply(
                butler=dataRef.getButler())
            srcMatch = dataRef.get("srcMatch", immediate=True)
            if hscRun is not None:
                # The reference object loader grows the bbox by the config parameter pixelMargin.  This
                # is set to 50 by default but is not reflected by the radius parameter set in the
                # metadata, so some matches may reside outside the circle searched within this radius
                # Thus, increase the radius set in the metadata fed into joinMatchListWithCatalog() to
                # accommodate.
                matchmeta = srcMatch.table.getMetadata()
                rad = matchmeta.getDouble("RADIUS")
                matchmeta.setDouble(
                    "RADIUS", rad * 1.05,
                    "field radius in degrees, approximate, padded")
            matches = refObjLoader.joinMatchListWithCatalog(srcMatch, sources)

            # Set the alias map for the matched sources (i.e. the [1] attribute schema for each match)
            if self.config.srcSchemaMap is not None and hscRun is not None:
                for mm in matches:
                    aliasMap = mm[1].schema.getAliasMap()
                    for lsstName, otherName in self.config.srcSchemaMap.items():
                        aliasMap.set(lsstName, otherName)

            if hscRun is not None:
                for slot in ("PsfFlux", "ModelFlux", "ApFlux", "InstFlux",
                             "Centroid", "Shape"):
                    getattr(matches[0][1].getTable(), "define" + slot)(getattr(
                        sources, "get" + slot + "Definition")())
                    # For some reason, the CalibFlux slot in sources is coming up as centroid_sdss, so
                    # set it to flux_naive explicitly
                    for slot in ("CalibFlux", ):
                        getattr(matches[0][1].getTable(),
                                "define" + slot)("flux_naive")
            matches = [m for m in matches if m[0] is not None]
            refSchema = matches[0][0].schema if matches else None

            if self.cterm is not None and len(matches) != 0:
                # Add a "flux" field to the input schema of the first element
                # of the match and populate it with a colorterm correct flux.
                mapper = afwTable.SchemaMapper(refSchema)
                for key, field in refSchema:
                    mapper.addMapping(key)
                fluxKey = mapper.editOutputSchema().addField(
                    "flux", type=float, doc="Reference flux")
                fluxSigmaKey = mapper.editOutputSchema().addField(
                    "fluxSigma", type=float, doc="Reference flux uncertainty")
                table = afwTable.SimpleTable.make(mapper.getOutputSchema())
                table.preallocate(len(matches))
                for match in matches:
                    newMatch = table.makeRecord()
                    newMatch.assign(match[0], mapper)
                    match[0] = newMatch
                primaryFluxKey = refSchema.find(
                    refSchema.join(self.cterm.primary, "flux")).key
                secondaryFluxKey = refSchema.find(
                    refSchema.join(self.cterm.secondary, "flux")).key
                primaryFluxSigmaKey = refSchema.find(
                    refSchema.join(self.cterm.primary, "fluxSigma")).key
                secondaryFluxSigmaKey = refSchema.find(
                    refSchema.join(self.cterm.secondary, "fluxSigma")).key
                refFlux1 = numpy.array(
                    [m[0].get(primaryFluxKey) for m in matches])
                refFlux2 = numpy.array(
                    [m[0].get(secondaryFluxKey) for m in matches])
                refFluxSigma1 = numpy.array(
                    [m[0].get(primaryFluxSigmaKey) for m in matches])
                refFluxSigma2 = numpy.array(
                    [m[0].get(secondaryFluxSigmaKey) for m in matches])
                refMag1 = -2.5 * numpy.log10(refFlux1)
                refMag2 = -2.5 * numpy.log10(refFlux2)
                refMag = self.cterm.transformMags(refMag1, refMag2)
                refFlux = numpy.power(10.0, -0.4 * refMag)
                refFluxSigma = self.cterm.propagateFluxErrors(
                    refFluxSigma1, refFluxSigma2)
                matches = [
                    self.setCatFlux(m, flux, fluxKey, fluxSigma, fluxSigmaKey)
                    for m, flux, fluxSigma in zip(matches, refFlux,
                                                  refFluxSigma) if flux == flux
                ]
            else:
                filterName = afwImage.Filter(calexp_md).getName()
                refFluxField = measAlg.getRefFluxField(refSchema, filterName)
                refSchema.getAliasMap().set("flux", refFluxField)

            # LSST reads in a_net catalogs with flux in "janskys", so must convert back to DN.
            matches = mosaicUtils.matchJanskyToDn(matches)

            selSources = self.selectStars(sources,
                                          self.config.includeSaturated)
            selMatches = self.selectStars(matches,
                                          self.config.includeSaturated)

            retSrc = list()
            retMatch = list()

            if len(selMatches) > self.config.minNumMatch:
                naxis1, naxis2 = afwImage.bboxFromMetadata(
                    calexp_md).getDimensions()
                if hscRun is None:
                    if nQuarter % 2 != 0:
                        naxis1, naxis2 = naxis2, naxis1
                bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                                     afwGeom.Extent2I(naxis1, naxis2))
                cellSet = afwMath.SpatialCellSet(bbox, self.config.cellSize,
                                                 self.config.cellSize)
                for s in selSources:
                    if numpy.isfinite(s.getRa().asDegrees()):  # get rid of NaN
                        src = measMosaic.Source(s)
                        src.setExp(dataId["visit"])
                        src.setChip(dataId["ccd"])
                        try:
                            tmp = measMosaic.SpatialCellSource(src)
                            cellSet.insertCandidate(tmp)
                        except Exception:
                            self.log.info(
                                "FAILED TO INSERT CANDIDATE: visit=%d ccd=%d x=%f y=%f"
                                % (dataRef.dataId["visit"], dataRef.dataId["ccd"],
                                   src.getX(), src.getY()) + " bbox=" + str(bbox))
                for cell in cellSet.getCellList():
                    cell.sortCandidates()
                    for i, cand in enumerate(cell):
                        src = cand.getSource()
                        retSrc.append(src)
                        if i == self.config.nStarPerCell - 1:
                            break
                for m in selMatches:
                    if m[0] is not None and m[1] is not None:
                        match = (measMosaic.Source(m[0], wcs),
                                 measMosaic.Source(m[1]))
                        match[1].setExp(dataId["visit"])
                        match[1].setChip(dataId["ccd"])
                        retMatch.append(match)
            else:
                self.log.info(
                    "%8d %3d : %d/%d matches  Suspicious to wrong match. Ignore this CCD"
                    % (dataRef.dataId["visit"], dataRef.dataId["ccd"],
                       len(selMatches), len(matches)))

        except Exception as e:
            self.log.warn("Failed to read %s: %s" % (dataId, e))
            return dataId, [None, None, None]

        return dataId, [retSrc, retMatch, wcs]
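
One detail in readSrc worth calling out: the "if flux == flux" guard in the colorterm branch is a NaN filter, because NaN never compares equal to itself, so matches whose corrected reference flux came out NaN are dropped. A tiny standalone illustration:

import math

flux = float("nan")
print(flux == flux)      # False -> such a match is filtered out of `matches`
print(math.isnan(flux))  # an equivalent, more explicit spelling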
Example #14
    def index(self, exposure_or_metadata, data_id, database):
        """Spatially index an |exposure| or |metadata| object.

        Parameters
        ----------

        exposure_or_metadata : lsst.afw.image.Exposure[DFILU] or lsst.daf.base.PropertySet
            An afw |exposure| or corresponding |metadata| object.

        data_id : object
            An object identifying a single exposure (e.g. as used by the
            butler). It must be possible to pickle `data_id`.

        database : sqlite3.Connection or str
            A connection to (or filename of) a SQLite 3 database.

        Returns
        -------

        ``None``, unless the |defer_writes| configuration parameter is ``True``.
        In that case, an :class:`.ExposureInfo` object containing a pickled
        data-id and an |encoded| |polygon| is returned.
        """
        # Get a pixel index bounding box for the exposure.
        if isinstance(exposure_or_metadata, daf_base.PropertySet):
            md = exposure_or_metadata
            # Map (LTV1, LTV2) to LSST (x0, y0). LSST convention says that
            # (x0, y0) is the location of the sub-image origin (the bottom-left
            # corner) relative to the origin of the parent, whereas LTVi encode
            # the origin of the parent relative to the origin of the subimage.
            pixel_bbox = afw_image.bboxFromMetadata(md)
            wcs = afw_image.makeWcs(md, False)
        else:
            pixel_bbox = exposure_or_metadata.getBBox()
            wcs = exposure_or_metadata.getWcs()
        # Pad the box by a configurable amount and bail if the result is empty.
        pixel_bbox.grow(self.config.pad_pixels)
        if pixel_bbox.isEmpty():
            self.log.warn("skipping exposure indexing for dataId=%s: "
                          "empty bounding box", data_id)
            return
        corners = []
        for c in pixel_bbox.getCorners():
            # Convert the box corners from pixel indexes to pixel positions,
            # and then to sky coordinates.
            c = wcs.pixelToSky(afw_image.indexToPosition(c.getX()),
                               afw_image.indexToPosition(c.getY()))
            c = (c.getLongitude().asRadians(), c.getLatitude().asRadians())
            # Bail if any coordinate is not finite.
            if any(math.isinf(x) or math.isnan(x) for x in c):
                self.log.warn("skipping exposure indexing for dataId=%s: "
                              "NaN or Inf in bounding box sky coordinate(s)"
                              " - bad WCS?", data_id)
                return
            # Convert from sky coordinates to unit vectors.
            corners.append(UnitVector3d(Angle.fromRadians(c[0]),
                                        Angle.fromRadians(c[1])))
        # Create a convex polygon containing the exposure pixels. When sphgeom
        # gains support for non-convex polygons, this could be changed to map
        # exposure.getPolygon() to a spherical equivalent, or to subdivide box
        # edges in pixel space to account for non linear projections. This
        # would have higher accuracy than the current approach of connecting
        # corner sky coordinates with great circles.
        poly = ConvexPolygon(corners)
        # Finally, persist or return the exposure information.
        info = ExposureInfo(pickle.dumps(data_id), poly.encode())
        if self.config.defer_writes:
            return info
        store_exposure_info(database, self.config.allow_replace, info)
 def makeWcs(self, metadata):
     wcs = makeSkyWcs(metadata, strip=True)
     dimensions = bboxFromMetadata(metadata).getDimensions()
     center = Point2D(dimensions/2.0)
     return makeFlippedWcs(wcs, self.FLIP_LR, self.FLIP_TB, center)
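
makeWcs above builds a SkyWcs from the header and then flips it about the chip centre. A hedged standalone version of the same pattern (flippedWcsFromMetadata is a hypothetical name; makeFlippedWcs is assumed importable from lsst.afw.geom alongside makeSkyWcs, and on older stacks Point2D comes from lsst.afw.geom rather than lsst.geom):

from lsst.afw.geom import makeSkyWcs, makeFlippedWcs
from lsst.afw.image import bboxFromMetadata
from lsst.geom import Point2D


def flippedWcsFromMetadata(metadata, flipLR=True, flipTB=False):
    """Read a SkyWcs from FITS metadata (stripping the cards used) and flip it
    left/right and/or top/bottom about the image centre."""
    wcs = makeSkyWcs(metadata, strip=True)
    dimensions = bboxFromMetadata(metadata).getDimensions()
    center = Point2D(dimensions / 2.0)
    return makeFlippedWcs(wcs, flipLR, flipTB, center)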
Example #16
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None, use_default=True):
        """Initialize mappings
        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry
        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.
        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibRoot : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        use_default : `bool`
            Load default camera mappings
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath) and use_default:
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
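
A pattern that recurs throughout _initMappings is binding loop variables into closures via default arguments (mapping=mapping, subPolicy=subPolicy, and so on). Without that, every closure created in the loop would see only the values from the final iteration. A small self-contained illustration (the Mapper class and dataset names here are made up):

class Mapper:
    """Stand-in for CameraMapper; only used to hang generated methods on."""


mapper = Mapper()
for datasetType in ("calexp", "src", "raw"):
    # Bad: a plain "lambda dataId: ..." would always see datasetType == "raw"
    # once the loop has finished.  Good: bind the current value as a default
    # argument, exactly as mapClosure/queryClosure do above.
    setattr(mapper, "map_" + datasetType,
            lambda dataId, datasetType=datasetType: "%s/%s" % (datasetType, dataId))

print(mapper.map_calexp({"visit": 1234}))  # -> calexp/{'visit': 1234}
print(mapper.map_raw({"visit": 1234}))     # -> raw/{'visit': 1234}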
Example #17
 def checkBBoxFromMetadata(self, filename, expected, hdu=0):
     metadata = afwFits.readMetadata(filename, hdu)
     bbox = afwImage.bboxFromMetadata(metadata)
     self.assertEqual(bbox, expected)
    def runDataRef(self, butler, dataRefList):
        """!Make a skymap from the bounds of the given set of calexps.

        @param[in]  butler        data butler used to save the SkyMap
        @param[in]  dataRefList   dataRefs of calexps used to determine the size and pointing of the SkyMap
        @return     a pipeBase Struct containing:
                    - skyMap: the constructed SkyMap
        """
        self.log.info("Extracting bounding boxes of %d images" % len(dataRefList))
        points = []
        for dataRef in dataRefList:
            if not dataRef.datasetExists("calexp"):
                self.log.warn("CalExp for %s does not exist: ignoring" % (dataRef.dataId,))
                continue
            md = dataRef.get("calexp_md", immediate=True)
            wcs = afwGeom.makeSkyWcs(md)
            # nb: don't need to worry about xy0 because Exposure saves Wcs with CRPIX shifted by (-x0, -y0).
            boxI = afwImage.bboxFromMetadata(md)
            boxD = afwGeom.Box2D(boxI)
            points.extend(wcs.pixelToSky(corner).getVector() for corner in boxD.getCorners())
        if len(points) == 0:
            raise RuntimeError("No data found from which to compute convex hull")
        self.log.info("Computing spherical convex hull")
        polygon = lsst.sphgeom.ConvexPolygon.convexHull(points)
        if polygon is None:
            raise RuntimeError(
                "Failed to compute convex hull of the vertices of all calexp bounding boxes; "
                "they may not be hemispherical."
            )
        circle = polygon.getBoundingCircle()

        datasetName = self.config.coaddName + "Coadd_skyMap"

        skyMapConfig = DiscreteSkyMap.ConfigClass()
        if self.config.doAppend and butler.datasetExists(datasetName):
            oldSkyMap = butler.get(datasetName, immediate=True)
            if not isinstance(oldSkyMap.config, DiscreteSkyMap.ConfigClass):
                raise TypeError("Cannot append to existing non-discrete skymap")
            compareLog = []
            if not self.config.skyMap.compare(oldSkyMap.config, output=compareLog.append):
                raise ValueError("Cannot append to existing skymap - configurations differ:", *compareLog)
            skyMapConfig.raList.extend(oldSkyMap.config.raList)
            skyMapConfig.decList.extend(oldSkyMap.config.decList)
            skyMapConfig.radiusList.extend(oldSkyMap.config.radiusList)
        skyMapConfig.update(**self.config.skyMap.toDict())
        circleCenter = lsst.sphgeom.LonLat(circle.getCenter())
        skyMapConfig.raList.append(circleCenter[0].asDegrees())
        skyMapConfig.decList.append(circleCenter[1].asDegrees())
        circleRadiusDeg = circle.getOpeningAngle().asDegrees()
        skyMapConfig.radiusList.append(circleRadiusDeg + self.config.borderSize)
        skyMap = DiscreteSkyMap(skyMapConfig)

        for tractInfo in skyMap:
            wcs = tractInfo.getWcs()
            posBox = afwGeom.Box2D(tractInfo.getBBox())
            pixelPosList = (
                posBox.getMin(),
                afwGeom.Point2D(posBox.getMaxX(), posBox.getMinY()),
                posBox.getMax(),
                afwGeom.Point2D(posBox.getMinX(), posBox.getMaxY()),
            )
            skyPosList = [wcs.pixelToSky(pos).getPosition(afwGeom.degrees) for pos in pixelPosList]
            posStrList = ["(%0.3f, %0.3f)" % tuple(skyPos) for skyPos in skyPosList]
            self.log.info("tract %s has corners %s (RA, Dec deg) and %s x %s patches" %
                          (tractInfo.getId(), ", ".join(posStrList),
                           tractInfo.getNumPatches()[0], tractInfo.getNumPatches()[1]))
        if self.config.doWrite:
            butler.put(skyMap, datasetName)
        return pipeBase.Struct(
            skyMap=skyMap
        )
    def readSrc(self, dataRef):
        """Read source catalog etc for input dataRef

        The source catalog, matched list, and wcs are read from 'src', 'srcMatch', and 'calexp_md',
        respectively.

        NOTE: If the detector has nQuarter%4 != 0 (i.e. it is rotated w.r.t the focal plane
              coordinate system), the (x, y) pixel values of the centroid slot for the source
              catalogs are rotated such that pixel (0, 0) is the LLC (i.e. the coordinate system
              expected by meas_mosaic).

        If color transformation information is given, it will be applied to the reference flux
        of the matched list.  The source catalog and matched list will be converted to measMosaic's
        Source and SourceMatch and returned.

        The number of 'Source's in each cell defined by config.cellSize will be limited to the
        brightest config.nStarPerCell.
        """

        self.log = Log.getDefaultLogger()

        dataId = dataRef.dataId

        try:
            if not dataRef.datasetExists("src"):
                raise RuntimeError("no data for src %s" % (dataId))
            if not dataRef.datasetExists("calexp_md"):
                raise RuntimeError("no data for calexp_md %s" % (dataId))

            calexp_md = dataRef.get("calexp_md", immediate=True)
            detector = dataRef.get("camera")[dataRef.dataId["ccd"]]  # OK for HSC; maybe not for other cameras
            wcs = afwGeom.makeSkyWcs(calexp_md)
            nQuarter = detector.getOrientation().getNQuarter()
            sources = dataRef.get("src", immediate=True, flags=afwTable.SOURCE_IO_NO_FOOTPRINTS)

            # Check if we are looking at HSC stack outputs: if so, no pixel rotation of sources is
            # required, but alias mapping must be set to associate HSC's schema with that of LSST.
            hscRun = mosaicUtils.checkHscStack(calexp_md)
            if hscRun is None:
                if nQuarter%4 != 0:
                    dims = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                    sources = mosaicUtils.rotatePixelCoords(sources, dims.getX(), dims.getY(),
                                                            nQuarter)

            # Set some alias maps for the source catalog where needed for
            # backwards compatibility
            if self.config.srcSchemaMap and hscRun:
                aliasMap = sources.schema.getAliasMap()
                for lsstName, otherName in self.config.srcSchemaMap.items():
                    aliasMap.set(lsstName, otherName)
            if self.config.flagsToAlias and "calib_psfUsed" in sources.schema:
                aliasMap = sources.schema.getAliasMap()
                for lsstName, otherName in self.config.flagsToAlias.items():
                    aliasMap.set(lsstName, otherName)

            refObjLoader = self.config.loadAstrom.apply(butler=dataRef.getButler())
            srcMatch = dataRef.get("srcMatch", immediate=True)
            if hscRun is not None:
                # The reference object loader grows the bbox by the config parameter pixelMargin.  This
                # is set to 50 by default but is not reflected by the radius parameter set in the
                # metadata, so some matches may reside outside the circle searched within this radius
                # Thus, increase the radius set in the metadata fed into joinMatchListWithCatalog() to
                # accommodate.
                matchmeta = srcMatch.table.getMetadata()
                rad = matchmeta.getDouble("RADIUS")
                matchmeta.setDouble("RADIUS", rad*1.05, "field radius in degrees, approximate, padded")
            matches = refObjLoader.joinMatchListWithCatalog(srcMatch, sources)

            # Set the alias map for the matched sources (i.e. the [1] attribute schema for each match)
            if self.config.srcSchemaMap is not None and hscRun is not None:
                for mm in matches:
                    aliasMap = mm[1].schema.getAliasMap()
                    for lsstName, otherName in self.config.srcSchemaMap.items():
                        aliasMap.set(lsstName, otherName)

            if hscRun is not None:
                for slot in ("PsfFlux", "ModelFlux", "ApFlux", "GaussianFlux", "Centroid", "Shape"):
                    getattr(matches[0][1].getTable(), "define" + slot)(
                        getattr(sources, "get" + slot + "Definition")())
                    # For some reason, the CalibFlux slot in sources is coming up as centroid_sdss, so
                    # set it to flux_naive explicitly
                    for slot in ("CalibFlux", ):
                        getattr(matches[0][1].getTable(), "define" + slot)("flux_naive")
            matches = [m for m in matches if m[0] is not None]
            refSchema = matches[0][0].schema if matches else None

            if self.cterm is not None and len(matches) != 0:
                # Add a "flux" field to the input schema of the first element
                # of the match and populate it with a colorterm correct flux.
                mapper = afwTable.SchemaMapper(refSchema)
                for key, field in refSchema:
                    mapper.addMapping(key)
                fluxKey = mapper.editOutputSchema().addField("flux", type=float, doc="Reference flux")
                fluxErrKey = mapper.editOutputSchema().addField("fluxErr", type=float,
                                                                  doc="Reference flux uncertainty")
                table = afwTable.SimpleTable.make(mapper.getOutputSchema())
                table.preallocate(len(matches))
                for match in matches:
                    newMatch = table.makeRecord()
                    newMatch.assign(match[0], mapper)
                    match[0] = newMatch

                # extract the matched refCat as a Catalog for the colorterm code
                refCat = afwTable.SimpleCatalog(matches[0].first.schema)
                refCat.reserve(len(matches))
                for x in matches:
                    record = refCat.addNew()
                    record.assign(x.first)

                refMag, refMagErr = self.cterm.getCorrectedMagnitudes(refCat,
                                                                      afwImage.Filter(calexp_md).getName())
                # NOTE: mosaic assumes fluxes are in Jy
                refFlux = (refMag*astropy.units.ABmag).to_value(astropy.units.Jy)
                refFluxErr = afwImage.fluxErrFromABMagErr(refMagErr, refMag)
                matches = [self.setCatFlux(m, flux, fluxKey, fluxErr, fluxErrKey) for
                           m, flux, fluxErr in zip(matches, refFlux, refFluxErr) if flux == flux]
            else:
                filterName = afwImage.Filter(calexp_md).getName()
                refFluxField = measAlg.getRefFluxField(refSchema, filterName)
                refSchema.getAliasMap().set("flux", refFluxField)

            # LSST reads in a_net catalogs with flux in "janskys", so must convert back to DN.
            matches = mosaicUtils.matchJanskyToDn(matches)

            selSources = self.selectStars(sources, self.config.includeSaturated)
            selMatches = self.selectStars(matches, self.config.includeSaturated)

            retSrc = list()
            retMatch = list()

            if len(selMatches) > self.config.minNumMatch:
                naxis1, naxis2 = afwImage.bboxFromMetadata(calexp_md).getDimensions()
                if hscRun is None:
                    if nQuarter%2 != 0:
                        naxis1, naxis2 = naxis2, naxis1
                bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0), afwGeom.Extent2I(naxis1, naxis2))
                cellSet = afwMath.SpatialCellSet(bbox, self.config.cellSize, self.config.cellSize)
                for s in selSources:
                    if numpy.isfinite(s.getRa().asDegrees()): # get rid of NaN
                        src = measMosaic.Source(s)
                        src.setExp(dataId["visit"])
                        src.setChip(dataId["ccd"])
                        try:
                            tmp = measMosaic.SpatialCellSource(src)
                            cellSet.insertCandidate(tmp)
                        except Exception:
                            self.log.info("FAILED TO INSERT CANDIDATE: visit=%d ccd=%d x=%f y=%f" %
                                          (dataRef.dataId["visit"], dataRef.dataId["ccd"],
                                           src.getX(), src.getY()) + " bbox=" + str(bbox))
                for cell in cellSet.getCellList():
                    cell.sortCandidates()
                    for i, cand in enumerate(cell):
                        src = cand.getSource()
                        retSrc.append(src)
                        if i == self.config.nStarPerCell - 1:
                            break
                for m in selMatches:
                    if m[0] is not None and m[1] is not None:
                        match = (measMosaic.Source(m[0], wcs), measMosaic.Source(m[1]))
                        match[1].setExp(dataId["visit"])
                        match[1].setChip(dataId["ccd"])
                        retMatch.append(match)
            else:
                self.log.info("%8d %3d : %d/%d matches  Suspicious to wrong match. Ignore this CCD" %
                              (dataRef.dataId["visit"], dataRef.dataId["ccd"], len(selMatches), len(matches)))

        except Exception as e:
            self.log.warn("Failed to read %s: %s" % (dataId, e))
            return dataId, [None, None, None]

        return dataId, [retSrc, retMatch, wcs]
Example #20
 def checkBBoxFromMetadata(self, filename, expected, hdu=0):
     metadata = afwFits.readMetadata(filename, hdu)
     bbox = afwImage.bboxFromMetadata(metadata)
     self.assertEqual(bbox, expected)
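
checkBBoxFromMetadata only needs header keywords, so the same call works on a hand-built PropertyList. A hedged sketch (keyword values are made up, and it assumes bboxFromMetadata takes the dimensions from NAXIS1/NAXIS2, as the naxis1, naxis2 unpacking in the examples above relies on):

from lsst.daf.base import PropertyList
import lsst.afw.image as afwImage

md = PropertyList()
md.set("NAXIS1", 100)
md.set("NAXIS2", 200)

bbox = afwImage.bboxFromMetadata(md)
print(bbox.getDimensions())  # (100, 200); origin defaults to (0, 0) with no XY0 keywords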
Example #21
    def run(self, butler, dataRefList):
        """!Make a skymap from the bounds of the given set of calexps.

        @param[in]  butler        data butler used to save the SkyMap
        @param[in]  dataRefList   dataRefs of calexps used to determine the size and pointing of the SkyMap
        @return     a pipeBase Struct containing:
                    - skyMap: the constructed SkyMap
        """
        self.log.info("Extracting bounding boxes of %d images" % len(dataRefList))
        points = []
        for dataRef in dataRefList:
            if not dataRef.datasetExists("calexp"):
                self.log.warn("CalExp for %s does not exist: ignoring" % (dataRef.dataId,))
                continue
            md = dataRef.get("calexp_md", immediate=True)
            wcs = afwGeom.makeSkyWcs(md)
            # nb: don't need to worry about xy0 because Exposure saves Wcs with CRPIX shifted by (-x0, -y0).
            boxI = afwImage.bboxFromMetadata(md)
            boxD = afwGeom.Box2D(boxI)
            points.extend(wcs.pixelToSky(corner).getVector() for corner in boxD.getCorners())
        if len(points) == 0:
            raise RuntimeError("No data found from which to compute convex hull")
        self.log.info("Computing spherical convex hull")
        polygon = lsst.sphgeom.ConvexPolygon.convexHull(points)
        if polygon is None:
            raise RuntimeError(
                "Failed to compute convex hull of the vertices of all calexp bounding boxes; "
                "they may not be hemispherical."
            )
        circle = polygon.getBoundingCircle()

        datasetName = self.config.coaddName + "Coadd_skyMap"

        skyMapConfig = DiscreteSkyMap.ConfigClass()
        if self.config.doAppend and butler.datasetExists(datasetName):
            oldSkyMap = butler.get(datasetName, immediate=True)
            if not isinstance(oldSkyMap.config, DiscreteSkyMap.ConfigClass):
                raise TypeError("Cannot append to existing non-discrete skymap")
            compareLog = []
            if not self.config.skyMap.compare(oldSkyMap.config, output=compareLog.append):
                raise ValueError("Cannot append to existing skymap - configurations differ:", *compareLog)
            skyMapConfig.raList.extend(oldSkyMap.config.raList)
            skyMapConfig.decList.extend(oldSkyMap.config.decList)
            skyMapConfig.radiusList.extend(oldSkyMap.config.radiusList)
        skyMapConfig.update(**self.config.skyMap.toDict())
        circleCenter = lsst.sphgeom.LonLat(circle.getCenter())
        skyMapConfig.raList.append(circleCenter[0].asDegrees())
        skyMapConfig.decList.append(circleCenter[1].asDegrees())
        circleRadiusDeg = circle.getOpeningAngle().asDegrees()
        skyMapConfig.radiusList.append(circleRadiusDeg + self.config.borderSize)
        skyMap = DiscreteSkyMap(skyMapConfig)

        for tractInfo in skyMap:
            wcs = tractInfo.getWcs()
            posBox = afwGeom.Box2D(tractInfo.getBBox())
            pixelPosList = (
                posBox.getMin(),
                afwGeom.Point2D(posBox.getMaxX(), posBox.getMinY()),
                posBox.getMax(),
                afwGeom.Point2D(posBox.getMinX(), posBox.getMaxY()),
            )
            skyPosList = [wcs.pixelToSky(pos).getPosition(afwGeom.degrees) for pos in pixelPosList]
            posStrList = ["(%0.3f, %0.3f)" % tuple(skyPos) for skyPos in skyPosList]
            self.log.info("tract %s has corners %s (RA, Dec deg) and %s x %s patches" %
                          (tractInfo.getId(), ", ".join(posStrList),
                           tractInfo.getNumPatches()[0], tractInfo.getNumPatches()[1]))
        if self.config.doWrite:
            butler.put(skyMap, datasetName)
        return pipeBase.Struct(
            skyMap=skyMap
        )
    def run(self, wcs_md_tuple_list, oldSkyMap=None):
        """Make a SkyMap from the bounds of the given set of calexp metadata.

        Parameters
        ----------
        wcs_md_tuple_list : iterable
           A list of tuples with each element expected to be a (Wcs, PropertySet) pair
        oldSkyMap : `lsst.skymap.DiscreteSkyMap`, optional
           The SkyMap to extend if appending

        Returns
        -------
        struct : `lsst.pipe.base.Struct`
           The returned struct has one attribute, ``skyMap``, which holds the returned SkyMap
        """
        self.log.info("Extracting bounding boxes of %d images" % len(wcs_md_tuple_list))
        points = []
        for wcs, md in wcs_md_tuple_list:
            # nb: don't need to worry about xy0 because Exposure saves Wcs with CRPIX shifted by (-x0, -y0).
            boxI = afwImage.bboxFromMetadata(md)
            boxD = geom.Box2D(boxI)
            points.extend(wcs.pixelToSky(corner).getVector() for corner in boxD.getCorners())
        if len(points) == 0:
            raise RuntimeError("No data found from which to compute convex hull")
        self.log.info("Computing spherical convex hull")
        polygon = lsst.sphgeom.ConvexPolygon.convexHull(points)
        if polygon is None:
            raise RuntimeError(
                "Failed to compute convex hull of the vertices of all calexp bounding boxes; "
                "they may not be hemispherical."
            )
        circle = polygon.getBoundingCircle()

        skyMapConfig = DiscreteSkyMap.ConfigClass()
        if oldSkyMap:
            skyMapConfig.raList.extend(oldSkyMap.config.raList)
            skyMapConfig.decList.extend(oldSkyMap.config.decList)
            skyMapConfig.radiusList.extend(oldSkyMap.config.radiusList)
        skyMapConfig.update(**self.config.skyMap.toDict())
        circleCenter = lsst.sphgeom.LonLat(circle.getCenter())
        skyMapConfig.raList.append(circleCenter[0].asDegrees())
        skyMapConfig.decList.append(circleCenter[1].asDegrees())
        circleRadiusDeg = circle.getOpeningAngle().asDegrees()
        skyMapConfig.radiusList.append(circleRadiusDeg + self.config.borderSize)
        skyMap = DiscreteSkyMap(skyMapConfig)

        for tractInfo in skyMap:
            wcs = tractInfo.getWcs()
            posBox = geom.Box2D(tractInfo.getBBox())
            pixelPosList = (
                posBox.getMin(),
                geom.Point2D(posBox.getMaxX(), posBox.getMinY()),
                posBox.getMax(),
                geom.Point2D(posBox.getMinX(), posBox.getMaxY()),
            )
            skyPosList = [wcs.pixelToSky(pos).getPosition(geom.degrees) for pos in pixelPosList]
            posStrList = ["(%0.3f, %0.3f)" % tuple(skyPos) for skyPos in skyPosList]
            self.log.info("tract %s has corners %s (RA, Dec deg) and %s x %s patches" %
                          (tractInfo.getId(), ", ".join(posStrList),
                           tractInfo.getNumPatches()[0], tractInfo.getNumPatches()[1]))
        return pipeBase.Struct(
            skyMap=skyMap
        )
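
Both run() variants reduce to the same geometric step: gather the sky unit vectors of every calexp corner, take their spherical convex hull, and size a single DiscreteSkyMap tract from its bounding circle. A stripped-down sketch of just that step (boundingCircleForBoxes is a hypothetical helper):

import lsst.geom as geom       # lsst.afw.geom on older stacks
import lsst.sphgeom


def boundingCircleForBoxes(wcs_bbox_pairs):
    """Return the bounding Circle of the spherical convex hull of the corners
    of all (SkyWcs, Box2I) pairs, mirroring the run() methods above."""
    points = []
    for wcs, boxI in wcs_bbox_pairs:
        boxD = geom.Box2D(boxI)
        points.extend(wcs.pixelToSky(corner).getVector() for corner in boxD.getCorners())
    if not points:
        raise RuntimeError("No data found from which to compute convex hull")
    polygon = lsst.sphgeom.ConvexPolygon.convexHull(points)
    if polygon is None:
        raise RuntimeError("Corners are not hemispherical; cannot form a convex hull")
    return polygon.getBoundingCircle()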