Example #1
 def testRelationships(self):
     p = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     self.assertTrue(p.contains(p.getCentroid()))
     self.assertEqual(p.getBoundingCircle().relate(p), CONTAINS)
Example #2
 def testConstruction(self):
     points = [UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()]
     p1 = ConvexPolygon(points)
     self.assertEqual(points, p1.getVertices())
     p2 = ConvexPolygon.cast(p1.clone())
     self.assertEqual(p1, p2)
     p3 = ConvexPolygon([-UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()])
     self.assertNotEqual(p1, p3)
Example #3
 def testCodec(self):
     p = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     s = p.encode()
     self.assertEqual(ConvexPolygon.decode(s), p)
     self.assertEqual(Region.decode(s), p)
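Since `encode()` returns a plain byte string, the region can be persisted anywhere bytes can go. A minimal file round trip, as a sketch:

from pathlib import Path

from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d

poly = ConvexPolygon([UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()])
Path("region.bin").write_bytes(poly.encode())  # opaque binary encoding
# Both the concrete and the polymorphic decoder revive the same polygon.
assert ConvexPolygon.decode(Path("region.bin").read_bytes()) == poly
assert Region.decode(Path("region.bin").read_bytes()) == poly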
Example #4
 def insertObservationRegions(self, registry, datastore):
     """Add spatial regions for visit-detector combinations.
     """
     sql = (
         "SELECT wcs.instrument AS instrument, wcs.visit AS visit, wcs.detector AS detector, "
         "        wcs.dataset_id AS wcs, metadata.dataset_id AS metadata "
         "    FROM dataset wcs "
         "        INNER JOIN dataset_collection wcs_collection "
         "            ON (wcs.dataset_id = wcs_collection.dataset_id) "
         "        INNER JOIN dataset metadata "
         "            ON (wcs.instrument = metadata.instrument "
         "                AND wcs.visit = metadata.visit "
         "                AND wcs.detector = metadata.detector) "
         "        INNER JOIN dataset_collection metadata_collection "
         "            ON (metadata.dataset_id = metadata_collection.dataset_id) "
         "    WHERE wcs_collection.collection = :collection "
         "          AND metadata_collection.collection = :collection "
         "          AND wcs.dataset_type_name = :wcs_name"
         "          AND metadata.dataset_type_name = :metadata_name")
     log = Log.getLogger("lsst.daf.butler.gen2convert")
     for config in self.config["regions"]:
         log.info("Adding observation regions using %s from %s.",
                  config["DatasetType"], config["collection"])
         visits = {}
         for row in registry.query(
                 sql,
                 collection=config["collection"],
                 wcs_name="{}.wcs".format(config["DatasetType"]),
                 metadata_name="{}.metadata".format(config["DatasetType"])):
             wcsRef = registry.getDataset(row["wcs"])
             metadataRef = registry.getDataset(row["metadata"])
             wcs = datastore.get(wcsRef)
             metadata = datastore.get(metadataRef)
             bbox = Box2D(bboxFromMetadata(metadata))
             bbox.grow(config["padding"])
             region = ConvexPolygon([
                 sp.getVector() for sp in wcs.pixelToSky(bbox.getCorners())
             ])
             registry.setDimensionRegion(
                 {k: row[k]
                  for k in ("instrument", "visit", "detector")},
                 region=region,
                 update=False)
             visits.setdefault((row["instrument"], row["visit"]),
                               []).extend(region.getVertices())
         for (instrument, visit), vertices in visits.items():
             region = ConvexPolygon(vertices)
             registry.setDimensionRegion(instrument=instrument,
                                         visit=visit,
                                         region=region)
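The per-visit regions above are built by pooling the vertices of every per-detector polygon and re-hulling the combined set (the `ConvexPolygon` constructor hulls its input points, as does the explicit `convexHull` factory used in other examples). The same pattern in isolation, as a sketch with `polygons` standing in for the detector regions:

from lsst.sphgeom import ConvexPolygon


def unionHull(polygons):
    # Smallest convex region covering all input polygons: pool their
    # vertices and take the convex hull of the combined set.
    vertices = []
    for poly in polygons:
        vertices.extend(poly.getVertices())
    return ConvexPolygon.convexHull(vertices)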
Example #5
 def testUpdate(self):
     """Tests for `Database.update`.
     """
     db = self.makeEmptyDatabase(origin=1)
     with db.declareStaticTables(create=True) as context:
         tables = context.addTableTuple(STATIC_TABLE_SPECS)
     # Insert two rows into table a, both without regions.
     db.insert(tables.a, {"name": "a1"}, {"name": "a2"})
     # Update one of the rows with a region.
     region = ConvexPolygon((UnitVector3d(1, 0, 0),
                             UnitVector3d(0, 1, 0),
                             UnitVector3d(0, 0, 1)))
     n = db.update(tables.a, {"name": "k"}, {"k": "a2", "region": region})
     self.assertEqual(n, 1)
     sql = sqlalchemy.sql.select(
         [tables.a.columns.name,
          tables.a.columns.region]).select_from(tables.a)
     self.assertCountEqual([dict(r) for r in db.query(sql).fetchall()],
                           [{"name": "a1", "region": None},
                            {"name": "a2", "region": region}])
Example #6
    def trimFakeCat(self, fakeCat, image, wcs):
        """Trim the fake cat to about the size of the input image.

        Parameters
        ----------
        fakeCat : `pandas.core.frame.DataFrame`
                    The catalog of fake sources to be input
        image : `lsst.afw.image.exposure.exposure.ExposureF`
                    The image into which the fake sources should be added
        wcs : `lsst.afw.geom.skyWcs.skyWcs.SkyWcs`
                    WCS to use to add fake sources

        Returns
        -------
        fakeCat : `pandas.core.frame.DataFrame`
                    The original fakeCat trimmed to the area of the image
        """

        bbox = Box2D(image.getBBox())
        corners = bbox.getCorners()

        skyCorners = wcs.pixelToSky(corners)
        region = ConvexPolygon([s.getVector() for s in skyCorners])

        def trim(row):
            coord = SpherePoint(row[self.config.raColName],
                                row[self.config.decColName], radians)
            return region.contains(coord.getVector())

        return fakeCat[fakeCat.apply(trim, axis=1)]
Example #7
 def testYaml(self):
     a = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     b = yaml.safe_load(yaml.dump(a))
     self.assertEqual(a, b)
Example #8
 def testPickle(self):
     a = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     b = pickle.loads(pickle.dumps(a, pickle.HIGHEST_PROTOCOL))
     self.assertEqual(a, b)
Example #9
    def _calculate_region_from_dataset_metadata(self, obsInfo, header, FormatterClass):
        """Calculate the sky region covered by the supplied observation
        information.

        Parameters
        ----------
        obsInfo : `~astro_metadata_translator.ObservationInfo`
            Summary information of this dataset.
        header : `Mapping`
            Header from the dataset.
        FormatterClass : `type`, a subclass of `FitsRawFormatterBase`
            Formatter class that should be used to compute the spatial region.

        Returns
        -------
        region : `lsst.sphgeom.ConvexPolygon`
            Region of sky covered by this observation.
        """
        if obsInfo.visit_id is not None and obsInfo.tracking_radec is not None:
            formatter = FormatterClass.fromMetadata(metadata=header, obsInfo=obsInfo)
            visitInfo = formatter.makeVisitInfo()
            detector = self.camera[obsInfo.detector_num]
            wcs = formatter.makeWcs(visitInfo, detector)
            pixBox = Box2D(detector.getBBox())
            if self.config.padRegionAmount > 0:
                pixBox.grow(self.config.padRegionAmount)
            pixCorners = pixBox.getCorners()
            sphCorners = [wcs.pixelToSky(point).getVector() for point in pixCorners]
            region = ConvexPolygon(sphCorners)
        else:
            region = None
        return region
Example #10
    def collectDimensionRecords(self, exposure: RawExposureData) -> RawExposureData:
        """Collect the `DimensionRecord` instances that must be inserted into
        the `~lsst.daf.butler.Registry` before an exposure's raw files may
        be ingested.

        Parameters
        ----------
        exposure : `RawExposureData`
            A structure containing information about the exposure to be
            ingested.  Should be considered consumed upon return.

        Returns
        -------
        exposure : `RawExposureData`
            An updated version of the input structure, with
            `RawExposureData.records` populated.
        """
        firstFile = exposure.files[0]
        firstDataset = firstFile.datasets[0]
        VisitDetectorRegionRecordClass = self.universe["visit_detector_region"].RecordClass
        exposure.records = {
            "exposure": [makeExposureRecordFromObsInfo(firstDataset.obsInfo, self.universe)],
        }
        if firstDataset.obsInfo.visit_id is not None:
            exposure.records["visit_detector_region"] = []
            visitVertices = []
            for file in exposure.files:
                for dataset in file.datasets:
                    if dataset.obsInfo.visit_id != firstDataset.obsInfo.visit_id:
                        raise ValueError(f"Inconsistent visit/exposure relationship for "
                                         f"exposure {firstDataset.obsInfo.exposure_id} between "
                                         f"{file.filename} and {firstFile.filename}: "
                                         f"{dataset.obsInfo.visit_id} != {firstDataset.obsInfo.visit_id}.")
                    if dataset.region is None:
                        self.log.warn("No region found for visit=%s, detector=%s.", dataset.obsInfo.visit_id,
                                      dataset.obsInfo.detector_num)
                        continue
                    visitVertices.extend(dataset.region.getVertices())
                    exposure.records["visit_detector_region"].append(
                        VisitDetectorRegionRecordClass.fromDict({
                            "instrument": dataset.obsInfo.instrument,
                            "visit": dataset.obsInfo.visit_id,
                            "detector": dataset.obsInfo.detector_num,
                            "region": dataset.region,
                        })
                    )
            if visitVertices:
                visitRegion = ConvexPolygon(visitVertices)
            else:
                self.log.warn("No region found for visit=%s.", firstDataset.obsInfo.visit_id)
                visitRegion = None
            exposure.records["visit"] = [
                makeVisitRecordFromObsInfo(firstDataset.obsInfo, self.universe, region=visitRegion)
            ]
        return exposure
Example #11
 def compute(self,
             visit: VisitDefinitionData,
             *,
             collections: Any = None) -> Tuple[Region, Dict[int, Region]]:
     # Docstring inherited from ComputeVisitRegionsTask.
     if self.config.mergeExposures:
         detectorBounds = defaultdict(list)
         for exposure in visit.exposures:
             exposureDetectorBounds = self.computeExposureBounds(
                 exposure, collections=collections)
             for detectorId, bounds in exposureDetectorBounds.items():
                 detectorBounds[detectorId].extend(bounds)
     else:
         detectorBounds = self.computeExposureBounds(
             visit.exposures[0], collections=collections)
     visitBounds = []
     detectorRegions = {}
     for detectorId, bounds in detectorBounds.items():
         detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
         visitBounds.extend(bounds)
     return ConvexPolygon.convexHull(visitBounds), detectorRegions
Example #12
 def testString(self):
     p = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     self.assertEqual(str(p), repr(p))
     self.assertEqual(
         repr(p), 'ConvexPolygon([UnitVector3d(0.0, 0.0, 1.0), '
         'UnitVector3d(1.0, 0.0, 0.0), '
         'UnitVector3d(0.0, 1.0, 0.0)])')
     self.assertEqual(
         p,
         eval(repr(p),
              dict(ConvexPolygon=ConvexPolygon, UnitVector3d=UnitVector3d)))
Example #13
def poly_area(polygon: sph.ConvexPolygon) -> float:
    """Calculate area of a convex polygon.

    Parameters
    ----------
    polygon : `lsst.sphgeom.ConvexPolygon`
        Polygon whose area is to be computed.

    Returns
    -------
    area : `float`
        Area of the polygon, in steradians.
    """

    vertices = polygon.getVertices()
    area = 0.
    # Fan triangulation from the first vertex; valid because the polygon
    # is convex.
    for i in range(2, len(vertices)):
        area += triangle_area(vertices[0], vertices[i - 1], vertices[i])
    return area
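The `triangle_area` helper is not shown here; a compatible sketch using the Van Oosterom-Strackee solid-angle formula (assuming, as in current `lsst.sphgeom`, that `UnitVector3d` exposes `x()`, `y()` and `z()` accessors):

import math

import numpy as np


def triangle_area(v1, v2, v3):
    # Area (solid angle, steradians) of the spherical triangle with the
    # given unit-vector vertices, via the Van Oosterom-Strackee formula:
    # tan(E / 2) = |a . (b x c)| / (1 + a.b + b.c + c.a).
    a, b, c = (np.array([v.x(), v.y(), v.z()]) for v in (v1, v2, v3))
    numerator = abs(np.dot(a, np.cross(b, c)))
    denominator = 1.0 + np.dot(a, b) + np.dot(b, c) + np.dot(c, a)
    return 2.0 * math.atan2(numerator, denominator)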
Example #14
def find_intersecting_exposures(database, region):
    """Find exposures that intersect a spherical region.

    Parameters
    ----------

    database : sqlite3.Connection or str
        A connection to (or filename of) a SQLite 3 database containing
        an exposure index.

    region : lsst.sphgeom.Region
        The spherical region of interest.

    Returns
    -------

        A list of :class:`.ExposureInfo` objects corresponding to the
        exposures intersecting `region`.  Their ``data_id`` attributes
        are data-id objects that can be passed to a butler to retrieve
        the corresponding exposure, and their ``boundary`` attributes
        are |polygon| objects.
    """
    if isinstance(database, sqlite3.Connection):
        conn = database
    else:
        conn = sqlite3.connect(database)
    query = ("SELECT pickled_data_id, encoded_polygon\n"
             "FROM exposure JOIN exposure_rtree USING (rowid)\n"
             "WHERE x_min < ? AND x_max > ? AND\n"
             "      y_min < ? AND y_max > ? AND\n"
             "      z_min < ? AND z_max > ?")
    bbox = region.getBoundingBox3d()
    params = (bbox.x().getB(), bbox.x().getA(),
              bbox.y().getB(), bbox.y().getA(),
              bbox.z().getB(), bbox.z().getA())
    results = []
    for row in conn.execute(query, params):
        # Note that in Python 2, BLOB columns are mapped to Python buffer
        # objects, and so a conversion to str is necessary. In Python 3,
        # BLOBs are mapped to bytes directly, and the str() calls must
        # be removed.
        poly = ConvexPolygon.decode(str(row[1]))
        if region.relate(poly) != DISJOINT:
            results.append(ExposureInfo(pickle.loads(str(row[0])), poly))
    return results
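Per the comment in the loop above, the Python 3 form of the row handling simply drops the `str()` conversions; a sketch:

for row in conn.execute(query, params):
    # sqlite3 maps BLOB columns to bytes directly under Python 3.
    poly = ConvexPolygon.decode(row[1])
    if region.relate(poly) != DISJOINT:
        results.append(ExposureInfo(pickle.loads(row[0]), poly))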
Example #15
def makeSkyPolygonFromBBox(bbox, wcs):
    """Make an on-sky polygon from a bbox and a SkyWcs

    Parameters
    ----------
    bbox : `lsst.geom.Box2I` or `lsst.geom.Box2D`
        Bounding box of region, in pixel coordinates
    wcs : `lsst.afw.geom.SkyWcs`
        Celestial WCS

    Returns
    -------
    polygon : `lsst.sphgeom.ConvexPolygon`
        On-sky region
    """
    pixelPoints = Box2D(bbox).getCorners()
    skyPoints = wcs.pixelToSky(pixelPoints)
    return ConvexPolygon.convexHull([sp.getVector() for sp in skyPoints])
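A hypothetical call site (the `exposure` object and test point are illustrative only):

footprint = makeSkyPolygonFromBBox(exposure.getBBox(), exposure.getWcs())
print(footprint.contains(UnitVector3d(0.0, 0.0, 1.0)))  # pole inside the footprint?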
Example #17
 def testReplace(self):
     """Tests for `Database.replace`.
     """
     db = self.makeEmptyDatabase(origin=1)
     with db.declareStaticTables(create=True) as context:
         tables = context.addTableTuple(STATIC_TABLE_SPECS)
     # Use 'replace' to insert a single row that contains a region and
     # query to get it back.
     region = ConvexPolygon((UnitVector3d(1, 0, 0),
                             UnitVector3d(0, 1, 0),
                             UnitVector3d(0, 0, 1)))
     row1 = {"name": "a1", "region": region}
     db.replace(tables.a, row1)
     self.assertEqual(
         [dict(r) for r in db.query(tables.a.select()).fetchall()], [row1])
     # Insert another row without a region.
     row2 = {"name": "a2", "region": None}
     db.replace(tables.a, row2)
     self.assertCountEqual(
         [dict(r) for r in db.query(tables.a.select()).fetchall()],
         [row1, row2])
     # Use replace to re-insert both of those rows again, which should do
     # nothing.
     db.replace(tables.a, row1, row2)
     self.assertCountEqual(
         [dict(r) for r in db.query(tables.a.select()).fetchall()],
         [row1, row2])
     # Replace row1 with a row with no region, while reinserting row2.
     row1a = {"name": "a1", "region": None}
     db.replace(tables.a, row1a, row2)
     self.assertCountEqual(
         [dict(r) for r in db.query(tables.a.select()).fetchall()],
         [row1a, row2])
     # Replace both rows, returning row1 to its original state, while adding
     # a new one.  Pass them in in a different order.
     row2a = {"name": "a2", "region": region}
     row3 = {"name": "a3", "region": None}
     db.replace(tables.a, row3, row2a, row1)
     self.assertCountEqual(
         [dict(r) for r in db.query(tables.a.select()).fetchall()],
         [row1, row2a, row3])
Example #18
 def testConstruction(self):
     points = [UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()]
     p1 = ConvexPolygon(points)
     self.assertEqual(points, p1.getVertices())
     p2 = p1.clone()
     self.assertEqual(p1, p2)
     p3 = ConvexPolygon(
         [-UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     self.assertNotEqual(p1, p3)
     p4 = ConvexPolygon.convexHull([
         UnitVector3d.Y(),
         UnitVector3d.X(),
         UnitVector3d(1, 1, 1),
         UnitVector3d.Z()
     ])
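     # UnitVector3d(1, 1, 1) normalizes to a direction interior to the
     # X/Y/Z triangle, so convexHull discards it and p4 reduces to the same
     # triangle as p1.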
     self.assertEqual(p1, p4)
Example #19
 def process_result_value(self, value: Optional[str], dialect: sqlalchemy.engine.Dialect
                          ) -> Optional[ConvexPolygon]:
     if value is None:
         return None
     return ConvexPolygon.decode(super().process_result_value(value, dialect))
Example #20
 def testRelationships(self):
     p = ConvexPolygon([UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()])
     self.assertTrue(p.contains(p.getCentroid()))
     self.assertEqual(p.getBoundingCircle().relate(p), CONTAINS)
Example #21
 def testCodec(self):
     p = ConvexPolygon([UnitVector3d.Z(), UnitVector3d.X(), UnitVector3d.Y()])
     s = p.encode()
     self.assertEqual(ConvexPolygon.decode(s), p)
     self.assertEqual(ConvexPolygon.cast(Region.decode(s)), p)
Example #22
 def getInnerSkyPolygon(self):
     """Get inner on-sky region as a sphgeom.ConvexPolygon.
     """
     skyUnitVectors = [sp.getVector() for sp in self.getVertexList()]
     return ConvexPolygon.convexHull(skyUnitVectors)
Example #24
    def index(self, exposure_or_metadata, data_id, database):
        """Spatially index an |exposure| or |metadata| object.

        Parameters
        ----------

        exposure_or_metadata : lsst.afw.image.Exposure[DFILU] or lsst.daf.base.PropertySet
            An afw |exposure| or corresponding |metadata| object.

        data_id : object
            An object identifying a single exposure (e.g. as used by the
            butler). It must be possible to pickle `data_id`.

        database : sqlite3.Connection or str
            A connection to (or filename of) a SQLite 3 database.

        Returns
        -------

        ``None``, unless the |defer_writes| configuration parameter is ``True``.
        In that case, an :class:`.ExposureInfo` object containing a pickled
        data-id and an |encoded| |polygon| is returned.
        """
        # Get a pixel index bounding box for the exposure.
        if isinstance(exposure_or_metadata, daf_base.PropertySet):
            md = exposure_or_metadata
            # Map (LTV1, LTV2) to LSST (x0, y0). LSST convention says that
            # (x0, y0) is the location of the sub-image origin (the bottom-left
            # corner) relative to the origin of the parent, whereas LTVi encode
            # the origin of the parent relative to the origin of the subimage.
            pixel_bbox = afw_image.bboxFromMetadata(md)
            wcs = afw_image.makeWcs(md, False)
        else:
            pixel_bbox = exposure_or_metadata.getBBox()
            wcs = exposure_or_metadata.getWcs()
        # Pad the box by a configurable amount and bail if the result is empty.
        pixel_bbox.grow(self.config.pad_pixels)
        if pixel_bbox.isEmpty():
            self.log.warn("skipping exposure indexing for dataId=%s: "
                          "empty bounding box", data_id)
            return
        corners = []
        for c in pixel_bbox.getCorners():
            # Convert the box corners from pixel indexes to pixel positions,
            # and then to sky coordinates.
            c = wcs.pixelToSky(afw_image.indexToPosition(c.getX()),
                               afw_image.indexToPosition(c.getY()))
            c = (c.getLongitude().asRadians(), c.getLatitude().asRadians())
            # Bail if any coordinate is not finite.
            if any(math.isinf(x) or math.isnan(x) for x in c):
                self.log.warn("skipping exposure indexing for dataId=%s: "
                              "NaN or Inf in bounding box sky coordinate(s)"
                              " - bad WCS?", data_id)
                return
            # Convert from sky coordinates to unit vectors.
            corners.append(UnitVector3d(Angle.fromRadians(c[0]),
                                        Angle.fromRadians(c[1])))
        # Create a convex polygon containing the exposure pixels. When sphgeom
        # gains support for non-convex polygons, this could be changed to map
        # exposure.getPolygon() to a spherical equivalent, or to subdivide box
        # edges in pixel space to account for non linear projections. This
        # would have higher accuracy than the current approach of connecting
        # corner sky coordinates with great circles.
        poly = ConvexPolygon(corners)
        # Finally, persist or return the exposure information.
        info = ExposureInfo(pickle.dumps(data_id), poly.encode())
        if self.config.defer_writes:
            return info
        store_exposure_info(database, self.config.allow_replace, info)
Example #25
def store_exposure_info(database, allow_replace, exposure_info):
    """Store exposure data-ids and bounding polygons in the given database.

    The database is assumed to have been initialized via
    :func:`.create_exposure_tables`.

    Parameters
    ----------

    database : sqlite3.Connection or str
        A connection to (or filename of) a SQLite 3 database.

    allow_replace : bool
        If ``True``, information for previously stored exposures with matching
        data-ids will be overwritten.

    exposure_info : iterable or lsst.daf.ingest.indexExposure.ExposureInfo
        One or more :class:`.ExposureInfo` objects to persist. Their
        ``data_id`` attributes must be pickled data-ids, and their
        ``boundary`` attributes must be |encoded| |polygon| objects.
    """
    if isinstance(database, sqlite3.Connection):
        conn = database
    else:
        conn = sqlite3.connect(database)
    with conn:
        cursor = conn.cursor()
        if isinstance(exposure_info, ExposureInfo):
            exposure_info = (exposure_info,)
        # Insert or update information in database
        for info in exposure_info:
            if info is None:
                continue
            # In Python 2, the sqlite3 module maps between Python buffer
            # objects and BLOBs. When migrating to Python 3, the buffer()
            # calls should be removed (sqlite3 maps bytes objects to BLOBs).
            pickled_data_id = buffer(info.data_id)
            encoded_polygon = buffer(info.boundary)
            bbox = ConvexPolygon.decode(info.boundary).getBoundingBox3d()
            if allow_replace:
                # See if there is already an entry for the given data id.
                cursor.execute(
                    'SELECT rowid FROM exposure WHERE pickled_data_id = ?',
                    (pickled_data_id,)
                )
                results = cursor.fetchall()
                if len(results) > 0:
                    # If so, update spatial information for the exposure.
                    row_id = results[0][0]
                    cursor.execute(
                        'UPDATE exposure\n'
                        '    SET encoded_polygon = ?\n'
                        '    WHERE rowid = ?',
                        (encoded_polygon, row_id)
                    )
                    cursor.execute(
                        'UPDATE exposure_rtree SET\n'
                        '    x_min = ?, x_max = ?,\n'
                        '    y_min = ?, y_max = ?,\n'
                        '    z_min = ?, z_max = ?\n'
                        'WHERE rowid = ?',
                        (bbox.x().getA(), bbox.x().getB(),
                         bbox.y().getA(), bbox.y().getB(),
                         bbox.z().getA(), bbox.z().getB(),
                         row_id)
                    )
                    # Skip the INSERT below and move on to the next entry;
                    # a bare return here would drop any remaining infos.
                    continue
            # Insert the data id and corresponding spatial information.
            cursor.execute(
                'INSERT INTO exposure\n'
                '    (pickled_data_id, encoded_polygon)\n'
                '    VALUES (?, ?)',
                (pickled_data_id, encoded_polygon)
            )
            row_id = cursor.lastrowid
            cursor.execute(
                'INSERT INTO exposure_rtree\n'
                '     (rowid, x_min, x_max, y_min, y_max, z_min, z_max)\n'
                '     VALUES (?, ?, ?, ?, ?, ?, ?)',
                (row_id,
                 bbox.x().getA(), bbox.x().getB(),
                 bbox.y().getA(), bbox.y().getB(),
                 bbox.z().getA(), bbox.z().getB())
            )
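As the in-code comment notes, the `buffer()` wrappers disappear under Python 3, where sqlite3 maps bytes to BLOBs directly; the two conversions would then read, as a sketch:

pickled_data_id = info.data_id
encoded_polygon = info.boundary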
Example #26
def main(subset, margin=10):
    visits = {}
    regions = {}
    vertices = {}
    raw = {}
    bias = {}
    dark = {}
    flat = {}
    for ref in subset:
        visit = ref.dataId["visit"]
        sensor = ref.dataId["ccd"]
        filter = ref.dataId["filter"]
        if visit not in visits:
            info = ref.get("calexp_visitInfo")
            obsMid = info.getDate()
            expTime = info.getExposureTime()
            # convert from middle of exposure date/time to beginning,
            # by subtracting half of the exposure time in nanoseconds
            obsBegin = DateTime(obsMid.nsecs() - int(expTime)*500000000)
            visits[visit] = (filter, obsBegin, expTime)
        raw[visit, sensor] = ref.get("raw_filename")[0]
        biasUnits = selectCalibUnits("bias", sensor)
        assert biasUnits[0] <= visit and biasUnits[1] > visit
        bias[biasUnits] = ref.get("bias_filename")[0]
        darkUnits = selectCalibUnits("dark", sensor)
        assert darkUnits[0] <= visit and darkUnits[1] > visit
        dark[darkUnits] = ref.get("dark_filename")[0]
        flatUnits = selectCalibUnits("flat", sensor, filter)
        assert flatUnits[0] <= visit and flatUnits[1] > visit
        flat[flatUnits] = ref.get("flat_filename")[0]
        bbox = ref.get("calexp_bbox")
        wcs = ref.get("calexp_wcs")
        region = makeBoxWcsRegion(box=bbox, wcs=wcs, margin=margin)
        regions[visit, sensor] = region
        vertices.setdefault(visit, []).extend(region.getVertices())

    tables["Dataset"] = np.zeros(len(raw) + len(bias) + len(dark) + len(flat),
                                 dtype=dtypes["Dataset"])
    tables["Visit"] = np.zeros(len(visits), dtype=dtypes["Visit"])
    tables["Snap"] = np.zeros(len(visits), dtype=dtypes["Snap"])
    tables["ObservedSensor"] = np.zeros(len(raw), dtype=dtypes["ObservedSensor"])
    tables["ObservedSensor"] = np.zeros(len(raw), dtype=dtypes["ObservedSensor"])
    tables["SnapDatasetJoin"] = np.zeros(len(raw), dtype=dtypes["SnapDatasetJoin"])
    tables["PhysicalSensorDatasetJoin"] = np.zeros(len(raw) + len(bias) + len(dark) + len(flat),
                                                   dtype=dtypes["PhysicalSensorDatasetJoin"])
    tables["PhysicalFilterDatasetJoin"] = np.zeros(len(flat), dtype=dtypes["PhysicalFilterDatasetJoin"])
    tables["VisitRangeDatasetJoin"] = np.zeros(len(flat) + len(bias) + len(dark),
                                               dtype=dtypes["VisitRangeDatasetJoin"])

    snapIndex = 1
    cameraName = "HSC"
    for n, (visit, (filter, obsBegin, expTime)) in enumerate(visits.items()):
        visitRecord = tables["Visit"][n]
        visitRecord["visit_number"] = visit
        visitRecord["physical_filter_name"] = filter
        visitRecord["obs_begin"] = obsBegin
        visitRecord["exposure_time"] = expTime
        visitRecord["region"] = ConvexPolygon.convexHull(vertices[visit]).encode()
        visitRecord["camera_name"] = cameraName
        snapRecord = tables["Snap"][n]
        snapRecord["visit_number"] = visit
        snapRecord["snap_index"] = snapIndex
        snapRecord["obs_begin"] = obsBegin
        snapRecord["exposure_time"] = expTime
        snapRecord["camera_name"] = cameraName

    datasetId = 1
    registryId = 1
    runId = 0
    for n, ((visit, sensor), uri) in enumerate(raw.items()):
        datasetRecord = tables["Dataset"][n]
        datasetRecord["dataset_id"] = datasetId
        datasetRecord["registry_id"] = registryId
        datasetRecord["uri"] = uri
        datasetRecord["dataset_type_name"] = "raw"
        datasetRecord["run_id"] = runId
        osRecord = tables["ObservedSensor"][n]
        osRecord["visit_number"] = visit
        osRecord["physical_sensor_number"] = sensor
        osRecord["camera_name"] = cameraName
        osRecord["region"] = regions[visit, sensor].encode()
        snapJoinRecord = tables["SnapDatasetJoin"][n]
        snapJoinRecord["dataset_id"] = datasetId
        snapJoinRecord["registry_id"] = registryId
        snapJoinRecord["visit_number"] = visit
        snapJoinRecord["camera_name"] = cameraName
        snapJoinRecord["snap_index"] = snapIndex
        psJoinRecord = tables["PhysicalSensorDatasetJoin"][n]
        psJoinRecord["dataset_id"] = datasetId
        psJoinRecord["registry_id"] = registryId
        psJoinRecord["physical_sensor_number"] = sensor
        psJoinRecord["camera_name"] = cameraName
        datasetId += 1

    for n1, ((visitBegin, visitEnd, sensor), uri) in enumerate(bias.items()):
        n2 = n1 + len(raw)
        n3 = n1
        datasetRecord = tables["Dataset"][n2]
        datasetRecord["dataset_id"] = datasetId
        datasetRecord["registry_id"] = registryId
        datasetRecord["uri"] = uri
        datasetRecord["dataset_type_name"] = "bias"
        datasetRecord["run_id"] = runId
        psJoinRecord = tables["PhysicalSensorDatasetJoin"][n2]
        psJoinRecord["dataset_id"] = datasetId
        psJoinRecord["registry_id"] = registryId
        psJoinRecord["physical_sensor_number"] = sensor
        psJoinRecord["camera_name"] = cameraName
        vrJoinRecord = tables["VisitRangeDatasetJoin"][n3]
        vrJoinRecord["dataset_id"] = datasetId
        vrJoinRecord["registry_id"] = registryId
        vrJoinRecord["visit_begin"] = visitBegin
        vrJoinRecord["visit_end"] = visitEnd
        vrJoinRecord["camera_name"] = cameraName
        datasetId += 1

    for n1, ((visitBegin, visitEnd, sensor), uri) in enumerate(dark.items()):
        n2 = n1 + len(raw) + len(bias)
        n3 = n1 + len(bias)
        datasetRecord = tables["Dataset"][n2]
        datasetRecord["dataset_id"] = datasetId
        datasetRecord["registry_id"] = registryId
        datasetRecord["uri"] = uri
        datasetRecord["dataset_type_name"] = "dark"
        datasetRecord["run_id"] = runId
        psJoinRecord = tables["PhysicalSensorDatasetJoin"][n2]
        psJoinRecord["dataset_id"] = datasetId
        psJoinRecord["registry_id"] = registryId
        psJoinRecord["physical_sensor_number"] = sensor
        psJoinRecord["camera_name"] = cameraName
        vrJoinRecord = tables["VisitRangeDatasetJoin"][n3]
        vrJoinRecord["dataset_id"] = datasetId
        vrJoinRecord["registry_id"] = registryId
        vrJoinRecord["visit_begin"] = visitBegin
        vrJoinRecord["visit_end"] = visitEnd
        vrJoinRecord["camera_name"] = cameraName
        datasetId += 1

    for n1, ((visitBegin, visitEnd, sensor, filter), uri) in enumerate(flat.items()):
        n2 = n1 + len(raw) + len(bias) + len(dark)
        n3 = n1 + len(bias) + len(dark)
        datasetRecord = tables["Dataset"][n2]
        datasetRecord["dataset_id"] = datasetId
        datasetRecord["registry_id"] = registryId
        datasetRecord["uri"] = uri
        datasetRecord["dataset_type_name"] = "flat"
        datasetRecord["run_id"] = runId
        psJoinRecord = tables["PhysicalSensorDatasetJoin"][n2]
        psJoinRecord["dataset_id"] = datasetId
        psJoinRecord["registry_id"] = registryId
        psJoinRecord["physical_sensor_number"] = sensor
        psJoinRecord["camera_name"] = cameraName
        vrJoinRecord = tables["VisitRangeDatasetJoin"][n3]
        vrJoinRecord["dataset_id"] = datasetId
        vrJoinRecord["registry_id"] = registryId
        vrJoinRecord["visit_begin"] = visitBegin
        vrJoinRecord["visit_end"] = visitEnd
        vrJoinRecord["camera_name"] = cameraName
        pfJoinRecord = tables["PhysicalFilterDatasetJoin"][n1]
        pfJoinRecord["dataset_id"] = datasetId
        pfJoinRecord["registry_id"] = registryId
        pfJoinRecord["physical_filter_name"] = filter
        pfJoinRecord["camera_name"] = cameraName
        datasetId += 1

    for name, table in tables.items():
        np.save(os.path.join(DATA_ROOT, "{}.npy".format(name)), table)
Example #27
    def testRegions(self):
        """Tests for using region fields in `Registry` dimensions.
        """
        # TODO: the test regions used here are enormous (significant fractions
        # of the sphere), and that makes this test prohibitively slow on
        # most real databases.  These should be made more realistic, and the
        # test moved to daf/butler/registry/tests/registry.py.
        registry = self.makeRegistry()
        regionTract = ConvexPolygon((UnitVector3d(1, 0, 0),
                                     UnitVector3d(0, 1, 0),
                                     UnitVector3d(0, 0, 1)))
        regionPatch = ConvexPolygon((UnitVector3d(1, 1, 0),
                                     UnitVector3d(0, 1, 0),
                                     UnitVector3d(0, 0, 1)))
        regionVisit = ConvexPolygon((UnitVector3d(1, 0, 0),
                                     UnitVector3d(0, 1, 1),
                                     UnitVector3d(0, 0, 1)))
        regionVisitDetector = ConvexPolygon((UnitVector3d(1, 0, 0),
                                             UnitVector3d(0, 1, 0),
                                             UnitVector3d(0, 1, 1)))
        for a, b in itertools.combinations(
            (regionTract, regionPatch, regionVisit, regionVisitDetector), 2):
            self.assertNotEqual(a, b)

        # This depends on current dimensions.yaml definitions
        self.assertEqual(
            len(list(registry.queryDimensions(["patch", "htm7"]))), 0)

        # Add some dimension entries
        registry.insertDimensionData("instrument", {"name": "DummyCam"})
        registry.insertDimensionData("physical_filter", {
            "instrument": "DummyCam",
            "name": "dummy_r",
            "abstract_filter": "r"
        }, {
            "instrument": "DummyCam",
            "name": "dummy_i",
            "abstract_filter": "i"
        })
        for detector in (1, 2, 3, 4, 5):
            registry.insertDimensionData(
                "detector", {
                    "instrument": "DummyCam",
                    "id": detector,
                    "full_name": str(detector)
                })
        registry.insertDimensionData(
            "visit", {
                "instrument": "DummyCam",
                "id": 0,
                "name": "zero",
                "physical_filter": "dummy_r",
                "region": regionVisit
            }, {
                "instrument": "DummyCam",
                "id": 1,
                "name": "one",
                "physical_filter": "dummy_i"
            })
        registry.insertDimensionData("skymap", {
            "skymap": "DummySkyMap",
            "hash": bytes()
        })
        registry.insertDimensionData("tract", {
            "skymap": "DummySkyMap",
            "tract": 0,
            "region": regionTract
        })
        registry.insertDimensionData(
            "patch", {
                "skymap": "DummySkyMap",
                "tract": 0,
                "patch": 0,
                "cell_x": 0,
                "cell_y": 0,
                "region": regionPatch
            })
        registry.insertDimensionData(
            "visit_detector_region", {
                "instrument": "DummyCam",
                "visit": 0,
                "detector": 2,
                "region": regionVisitDetector
            })

        def getRegion(dataId):
            return registry.expandDataId(dataId).region

        # Get region for a tract
        self.assertEqual(regionTract,
                         getRegion({
                             "skymap": "DummySkyMap",
                             "tract": 0
                         }))
        # Attempt to get region for a non-existent tract
        with self.assertRaises(LookupError):
            getRegion({"skymap": "DummySkyMap", "tract": 1})
        # Get region for a (tract, patch) combination
        self.assertEqual(
            regionPatch,
            getRegion({
                "skymap": "DummySkyMap",
                "tract": 0,
                "patch": 0
            }))
        # Get region for a non-existent (tract, patch) combination
        with self.assertRaises(LookupError):
            getRegion({"skymap": "DummySkyMap", "tract": 0, "patch": 1})
        # Get region for a visit
        self.assertEqual(regionVisit,
                         getRegion({
                             "instrument": "DummyCam",
                             "visit": 0
                         }))
        # Attempt to get region for a non-existent visit
        with self.assertRaises(LookupError):
            getRegion({"instrument": "DummyCam", "visit": 10})
        # Get region for a (visit, detector) combination
        self.assertEqual(
            regionVisitDetector,
            getRegion({
                "instrument": "DummyCam",
                "visit": 0,
                "detector": 2
            }))
        # Attempt to get region for a non-existent (visit, detector)
        # combination.  This returns None rather than raising because we don't
        # want to require the region record to be present.
        self.assertIsNone(
            getRegion({
                "instrument": "DummyCam",
                "visit": 0,
                "detector": 3
            }))
        # getRegion for a dataId containing no spatial dimensions should
        # return None
        self.assertIsNone(getRegion({"instrument": "DummyCam"}))
        # getRegion for a mix of spatial dimensions should return
        # NotImplemented, at least until we get it implemented.
        self.assertIs(
            getRegion({
                "instrument": "DummyCam",
                "visit": 0,
                "detector": 2,
                "skymap": "DummySkyMap",
                "tract": 0
            }), NotImplemented)
        # Check if we can get the region for a skypix
        self.assertIsInstance(getRegion({"htm9": 1000}), ConvexPolygon)
        # patch_htm7_overlap should not be empty
        self.assertNotEqual(
            len(list(registry.queryDimensions(["patch", "htm7"]))), 0)
Example #28
 def testRelationships(self):
     p = ConvexPolygon(
         [UnitVector3d.Z(),
          UnitVector3d.X(),
          UnitVector3d.Y()])
     self.assertTrue(p.contains(p.getCentroid()))
     boundingCircle = p.getBoundingCircle()
     self.assertEqual(boundingCircle.relate(p), CONTAINS)
     self.assertTrue(p.isWithin(boundingCircle))
     self.assertTrue(p.intersects(boundingCircle))
     self.assertFalse(p.isDisjointFrom(boundingCircle))
     self.assertFalse(p.contains(boundingCircle))
     tinyCircle = Circle(boundingCircle.getCenter())
     self.assertFalse(p.isWithin(tinyCircle))
     self.assertTrue(p.intersects(tinyCircle))
     self.assertFalse(p.isDisjointFrom(tinyCircle))
     self.assertTrue(p.contains(tinyCircle))
Example #29
    def testInsertQueryDelete(self):
        """Test the `Database.insert`, `Database.query`, and `Database.delete`
        methods, as well as the `Base64Region` type and the ``onDelete``
        argument to `ddl.ForeignKeySpec`.
        """
        db = self.makeEmptyDatabase(origin=1)
        with db.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Insert a single, non-autoincrement row that contains a region and
        # query to get it back.
        region = ConvexPolygon((UnitVector3d(1, 0, 0),
                                UnitVector3d(0, 1, 0),
                                UnitVector3d(0, 0, 1)))
        row = {"name": "a1", "region": region}
        db.insert(tables.a, row)
        self.assertEqual(
            [dict(r) for r in db.query(tables.a.select()).fetchall()], [row])
        # Insert multiple autoincrement rows but do not try to get the IDs
        # back immediately.
        db.insert(tables.b, {
            "name": "b1",
            "value": 10
        }, {
            "name": "b2",
            "value": 20
        })
        results = [
            dict(r)
            for r in db.query(tables.b.select().order_by("id")).fetchall()
        ]
        self.assertEqual(len(results), 2)
        for row in results:
            self.assertIn(row["name"], ("b1", "b2"))
            self.assertIsInstance(row["id"], int)
        self.assertGreater(results[1]["id"], results[0]["id"])
        # Insert multiple autoincrement rows and get the IDs back from insert.
        rows = [{"name": "b3", "value": 30}, {"name": "b4", "value": 40}]
        ids = db.insert(tables.b, *rows, returnIds=True)
        results = [
            dict(r) for r in db.query(tables.b.select().where(
                tables.b.columns.id > results[1]["id"])).fetchall()
        ]
        expected = [dict(row, id=id) for row, id in zip(rows, ids)]
        self.assertCountEqual(results, expected)
        self.assertTrue(all(result["id"] is not None for result in results))
        # Insert multiple rows into a table with an autoincrement+origin
        # primary key, then use the returned IDs to insert into a dynamic
        # table.
        rows = [{
            "origin": db.origin,
            "b_id": results[0]["id"]
        }, {
            "origin": db.origin,
            "b_id": None
        }]
        ids = db.insert(tables.c, *rows, returnIds=True)
        results = [dict(r) for r in db.query(tables.c.select()).fetchall()]
        expected = [dict(row, id=id) for row, id in zip(rows, ids)]
        self.assertCountEqual(results, expected)
        self.assertTrue(all(result["id"] is not None for result in results))
        # Add the dynamic table.
        d = db.ensureTableExists("d", DYNAMIC_TABLE_SPEC)
        # Insert into it.
        rows = [{
            "c_origin": db.origin,
            "c_id": id,
            "a_name": "a1"
        } for id in ids]
        db.insert(d, *rows)
        results = [dict(r) for r in db.query(d.select()).fetchall()]
        self.assertCountEqual(rows, results)
        # Insert multiple rows into a table with an autoincrement+origin
        # primary key (this is especially tricky for SQLite, but good to test
        # for all DBs), but pass in a value for the autoincrement key.
        # For extra complexity, we re-use the autoincrement value with a
        # different value for origin.
        rows2 = [{
            "id": 700,
            "origin": db.origin,
            "b_id": None
        }, {
            "id": 700,
            "origin": 60,
            "b_id": None
        }, {
            "id": 1,
            "origin": 60,
            "b_id": None
        }]
        db.insert(tables.c, *rows2)
        results = [dict(r) for r in db.query(tables.c.select()).fetchall()]
        self.assertCountEqual(results, expected + rows2)
        self.assertTrue(all(result["id"] is not None for result in results))

        # Define 'SELECT COUNT(*)' query for later use.
        count = sqlalchemy.sql.select([sqlalchemy.sql.func.count()])
        # Get the values we inserted into table b.
        bValues = [dict(r) for r in db.query(tables.b.select()).fetchall()]
        # Remove two rows from table b by ID.
        n = db.delete(tables.b, ["id"], {"id": bValues[0]["id"]},
                      {"id": bValues[1]["id"]})
        self.assertEqual(n, 2)
        # Remove the other two rows from table b by name.
        n = db.delete(tables.b, ["name"], {"name": bValues[2]["name"]},
                      {"name": bValues[3]["name"]})
        self.assertEqual(n, 2)
        # There should now be no rows in table b.
        self.assertEqual(db.query(count.select_from(tables.b)).scalar(), 0)
        # All b_id values in table c should now be NULL, because there's an
        # onDelete='SET NULL' foreign key.
        self.assertEqual(
            db.query(
                count.select_from(tables.c).where(
                    tables.c.columns.b_id != None)).scalar(),  # noqa:E711
            0)
        # Remove all rows in table a (there's only one); this should remove all
        # rows in d due to onDelete='CASCADE'.
        n = db.delete(tables.a, [])
        self.assertEqual(n, 1)
        self.assertEqual(db.query(count.select_from(tables.a)).scalar(), 0)
        self.assertEqual(db.query(count.select_from(d)).scalar(), 0)
Example #30
 def process_result_value(self, value, dialect):
     if value is None:
         return None
     return ConvexPolygon.decode(super().process_result_value(
         value, dialect))
Example #31
 def process_result_value(self, value, dialect):
     if value is None:
         return None
     return ConvexPolygon.decode(b64decode(value))
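These decoders are the read half of a SQLAlchemy `TypeDecorator`; a minimal sketch of the matching bind side, assuming the column stores the base64 text of `ConvexPolygon.encode()`:

from base64 import b64encode


def process_bind_param(self, value, dialect):
    # Serialize the region to bytes, then base64-encode for storage as text.
    if value is None:
        return None
    return b64encode(value.encode()).decode("ascii")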