def test_expanding_and_clipping(self):
    """The chained (pure) operations must agree with the in-place ones."""
    box = Box.fromDegrees(0, 0, 10, 10)
    # Build the expected result with the non-mutating variants first.
    expected = (
        box.expandedTo(LonLat.fromDegrees(20, 20))
           .expandedTo(Box.fromDegrees(0, 0, 30, 10))
           .clippedTo(Box.fromDegrees(10, 10, 15, 15))
           .clippedTo(LonLat.fromDegrees(11, 11))
    )
    # Now apply the same sequence of mutations in place.
    box.expandTo(LonLat.fromDegrees(20, 20))
    box.expandTo(Box.fromDegrees(0, 0, 30, 10))
    box.clipTo(Box.fromDegrees(10, 10, 15, 15))
    box.clipTo(LonLat.fromDegrees(11, 11))
    self.assertEqual(box, expected)
    self.assertEqual(box, LonLat.fromDegrees(11, 11))
    # Clipping to a point outside the box empties it.
    box.clipTo(LonLat.fromDegrees(0, 0))
    self.assertTrue(box.isEmpty())
def testChunkBoundingBox(self):
    """Exercise chunk and sub-chunk bounding-box computation."""
    chunker = Chunker(200, 5)
    chunk_id = 3645
    stripe = chunker.getStripe(chunk_id)
    self.assertEqual(stripe, 9)
    chunk_in_stripe = chunker.getChunk(chunk_id, stripe)
    self.assertEqual(chunk_in_stripe, 45)
    expected_bbox = Box.fromRadians(5.048988193233824, -1.4294246573883558,
                                    5.1611879309330035, -1.413716694110407)
    bbox = chunker.getChunkBoundingBox(stripe, chunk_in_stripe)
    self.assertAlmostEqual(bbox, expected_bbox)
    expected_sub_bbox = Box.fromRadians(0.0, -1.5707963267948966,
                                        6.283185307179586, -1.5676547341363067)
    sub_bbox = chunker.getSubChunkBoundingBox(0, 0)
    self.assertAlmostEqual(sub_bbox, expected_sub_bbox)
def testIntersecting(self):
    """Verify chunk and sub-chunk intersection queries for a small box."""
    region = Box.fromDegrees(273.6, 30.7,
                             273.7180105379097, 30.722546655347717)
    chunker = Chunker(85, 12)
    self.assertEqual(chunker.getChunksIntersecting(region),
                     [9630, 9631, 9797])
    expected_sub_chunks = [(9630, [770]), (9631, [759]), (9797, [11])]
    self.assertEqual(chunker.getSubChunksIntersecting(region),
                     expected_sub_chunks)
def test_center_and_dimensions(self):
    """Check accessors for the box center, size, and bounding intervals."""
    box = Box.fromDegrees(-90, -45, 90, 45)
    # (actual, expected) pairs for each accessor under test.
    checks = [
        (box.getCenter(), LonLat.fromDegrees(0, 0)),
        (box.getWidth(), Angle.fromDegrees(180)),
        (box.getHeight(), Angle.fromDegrees(90)),
        (box.getLon().getA(), NormalizedAngle.fromDegrees(-90)),
        (box.getLat().getB(), Angle.fromDegrees(45)),
    ]
    for actual, expected in checks:
        self.assertEqual(actual, expected)
def test_relationships(self):
    """Exercise containment, intersection, and relate() between regions."""
    box = Box.fromDegrees(90, 0, 180, 45)
    point = LonLat.fromDegrees(135, 10)
    # Point containment via both the operator and the method.
    self.assertIn(point, box)
    self.assertTrue(box.contains(point))
    # A degenerate box inside the first one.
    inner = Box.fromDegrees(135, 15, 135, 30)
    self.assertTrue(box.contains(inner))
    self.assertTrue(inner.isWithin(box))
    # An overlapping box, and unit-vector containment.
    overlapping = Box.fromDegrees(0, -45, 90, 0)
    vector = UnitVector3d(1, 1, -1)
    self.assertTrue(box.intersects(overlapping))
    self.assertIn(vector, overlapping)
    self.assertTrue(overlapping.contains(vector))
    # A box sharing no points with the first one.
    disjoint = Box.fromDegrees(200, 10, 300, 20)
    self.assertTrue(box.isDisjointFrom(disjoint))
    # relate() reports the same facts as the boolean predicates.
    self.assertEqual(box.relate(LonLat.fromDegrees(135, 10)), CONTAINS)
    self.assertEqual(disjoint.relate(box), DISJOINT)
def _roundTripThroughApdb(objects, sources, forcedSources, dateTime):
    """Run object and source catalogs through the Apdb to get the correct
    table schemas.

    Parameters
    ----------
    objects : `pandas.DataFrame`
        Set of test DiaObjects to round trip.
    sources : `pandas.DataFrame`
        Set of test DiaSources to round trip.
    forcedSources : `pandas.DataFrame`
        Set of test DiaForcedSources to round trip.
    dateTime : `lsst.daf.base.DateTime`
        Time for the Apdb.

    Returns
    -------
    objects : `pandas.DataFrame`
        Round tripped objects.
    sources : `pandas.DataFrame`
        Round tripped sources.
    forcedSources : `pandas.DataFrame`
        Round tripped forced sources.
    """
    # Back the Apdb with a throw-away on-disk SQLite database; the file
    # lives only as long as this function.
    tmpFile = tempfile.NamedTemporaryFile()
    apdbConfig = ApdbSqlConfig()
    apdbConfig.db_url = "sqlite:///" + tmpFile.name
    apdbConfig.dia_object_index = "baseline"
    apdbConfig.dia_object_columns = []

    apdb = ApdbSql(config=apdbConfig)
    apdb.makeSchema()

    # Concatenate the inputs with the corresponding Apdb query results so
    # the DataFrames pick up the Apdb column schemas before storing.
    wholeSky = Box.full()
    diaObjects = pd.concat([apdb.getDiaObjects(wholeSky), objects])
    diaSources = pd.concat(
        [apdb.getDiaSources(wholeSky, [], dateTime), sources])
    diaForcedSources = pd.concat(
        [apdb.getDiaForcedSources(wholeSky, [], dateTime), forcedSources])

    apdb.store(dateTime, diaObjects, diaSources, diaForcedSources)

    # Read everything back so the returned catalogs are exactly what the
    # Apdb produces.
    diaObjects = apdb.getDiaObjects(wholeSky)
    diaSources = apdb.getDiaSources(wholeSky,
                                    np.unique(diaObjects["diaObjectId"]),
                                    dateTime)
    diaForcedSources = apdb.getDiaForcedSources(
        wholeSky, np.unique(diaObjects["diaObjectId"]), dateTime)

    # Index the catalogs the way downstream test code expects to look
    # them up; drop=False keeps the index columns in the data as well.
    diaObjects.set_index("diaObjectId", drop=False, inplace=True)
    diaSources.set_index(["diaObjectId", "filterName", "diaSourceId"],
                         drop=False,
                         inplace=True)
    diaForcedSources.set_index(["diaObjectId"], drop=False, inplace=True)

    return (diaObjects, diaSources, diaForcedSources)
def test_string(self):
    """Check str(), repr(), and repr round-tripping through eval()."""
    box = Box.fromRadians(0, 0, 1, 1)
    self.assertEqual(str(box), 'Box([0.0, 1.0], [0.0, 1.0])')
    expected_repr = ('Box(NormalizedAngleInterval.fromRadians(0.0, 1.0), '
                     'AngleInterval.fromRadians(0.0, 1.0))')
    self.assertEqual(repr(box), expected_repr)
    # repr() output must evaluate back to an equal box.
    namespace = dict(AngleInterval=AngleInterval,
                     Box=Box,
                     NormalizedAngleInterval=NormalizedAngleInterval)
    self.assertEqual(box, eval(repr(box), namespace))
def test_comparison_operators(self):
    """Check equality and inequality against points and other boxes."""
    # A degenerate box compares equal to the point it was built from.
    point_box = Box(LonLat.fromDegrees(45, 45))
    self.assertEqual(point_box, LonLat.fromDegrees(45, 45))
    self.assertNotEqual(point_box, LonLat.fromDegrees(45, 90))
    # Boxes built from degrees and from intervals must compare equal.
    box = Box.fromDegrees(90, -45, 180, 45)
    self.assertEqual(box,
                     Box(NormalizedAngleInterval.fromDegrees(90, 180),
                         AngleInterval.fromDegrees(-45, 45)))
    self.assertNotEqual(box, Box.fromDegrees(90, -45, 180, 90))
def test_construction(self):
    """Check the Box constructors and factory methods agree."""
    full = Box(Box.allLongitudes(), Box.allLatitudes())
    self.assertTrue(full.isFull())
    box = Box.fromDegrees(-90, -45, 90, 45)
    self.assertEqual(box, Box(box.getLon(), box.getLat()))
    half_pi = 0.5 * math.pi
    quarter_pi = 0.25 * math.pi
    # The same box built three different ways, plus a clone.
    from_radians = Box.fromRadians(-half_pi, -quarter_pi,
                                   half_pi, quarter_pi)
    from_corners = Box(LonLat.fromRadians(-half_pi, -quarter_pi),
                       LonLat.fromRadians(half_pi, quarter_pi))
    from_center = Box(LonLat.fromRadians(0, 0),
                      Angle(half_pi), Angle(quarter_pi))
    copy = from_center.clone()
    self.assertEqual(from_radians, from_corners)
    self.assertEqual(from_corners, from_center)
    self.assertEqual(from_center, copy)
    # clone() must produce a distinct object, not an alias.
    self.assertNotEqual(id(from_center), id(copy))
    self.assertTrue(Box().isEmpty())
    self.assertTrue(Box.empty().isEmpty())
    self.assertTrue(Box.full().isFull())
def test_pickle(self):
    """A Box must survive a pickle round trip unchanged."""
    original = Box.fromDegrees(0, 0, 10, 10)
    restored = pickle.loads(
        pickle.dumps(original, pickle.HIGHEST_PROTOCOL))
    self.assertEqual(original, restored)
def test_codec(self):
    """Encoded boxes must decode back via Box and the Region base class."""
    original = Box.fromRadians(0, 0, 1, 1)
    encoded = original.encode()
    self.assertEqual(Box.decode(encoded), original)
    self.assertEqual(Region.decode(encoded), original)
def test_dilation_and_erosion(self):
    """Chained (pure) and in-place dilation/erosion must agree."""
    box = Box.fromRadians(0.5, -0.5, 1.5, 0.5)
    # Expected result from the non-mutating variants.
    expected = (box.dilatedBy(Angle(0.5), Angle(0.5))
                   .erodedBy(Angle(1), Angle(1)))
    # Same sequence applied in place (mutators return self, so they chain).
    box.dilateBy(Angle(0.5), Angle(0.5)).erodeBy(Angle(1), Angle(1))
    self.assertEqual(box, expected)
    self.assertEqual(box, LonLat.fromRadians(1, 0))
def test_yaml(self):
    """A Box must survive a YAML round trip unchanged."""
    original = Box.fromDegrees(0, 0, 10, 10)
    restored = yaml.safe_load(yaml.dump(original))
    self.assertEqual(original, restored)
def testSkyMapDimensions(self):
    """Test involving only skymap dimensions, no joins to instrument"""
    registry = self.registry

    # need a bunch of dimensions and datasets for test, we want
    # "abstract_filter" in the test so also have to add physical_filter
    # dimensions
    registry.addDimensionEntry("instrument", dict(instrument="DummyCam"))
    registry.addDimensionEntry("physical_filter",
                               dict(instrument="DummyCam",
                                    physical_filter="dummy_r",
                                    abstract_filter="r"))
    registry.addDimensionEntry("physical_filter",
                               dict(instrument="DummyCam",
                                    physical_filter="dummy_i",
                                    abstract_filter="i"))
    registry.addDimensionEntry("skymap",
                               dict(skymap="DummyMap",
                                    hash="sha!".encode("utf8")))
    # 10 tracts with 10 patches each; the region is a degenerate
    # (single-point) box since geometry is irrelevant to this test.
    for tract in range(10):
        registry.addDimensionEntry("tract",
                                   dict(skymap="DummyMap", tract=tract))
        for patch in range(10):
            registry.addDimensionEntry(
                "patch",
                dict(skymap="DummyMap", tract=tract, patch=patch,
                     cell_x=0, cell_y=0,
                     region=Box(LonLat(NormalizedAngle(0.), Angle(0.)))))

    # dataset types
    collection = "test"
    run = registry.makeRun(collection=collection)
    storageClass = StorageClass("testDataset")
    registry.storageClasses.registerStorageClass(storageClass)
    calexpType = DatasetType(name="deepCoadd_calexp",
                             dimensions=registry.dimensions.extract(
                                 ("skymap", "tract", "patch",
                                  "abstract_filter")),
                             storageClass=storageClass)
    registry.registerDatasetType(calexpType)
    mergeType = DatasetType(name="deepCoadd_mergeDet",
                            dimensions=registry.dimensions.extract(
                                ("skymap", "tract", "patch")),
                            storageClass=storageClass)
    registry.registerDatasetType(mergeType)
    measType = DatasetType(name="deepCoadd_meas",
                           dimensions=registry.dimensions.extract(
                               ("skymap", "tract", "patch",
                                "abstract_filter")),
                           storageClass=storageClass)
    registry.registerDatasetType(measType)

    dimensions = registry.dimensions.empty.union(calexpType.dimensions,
                                                 mergeType.dimensions,
                                                 measType.dimensions,
                                                 implied=True)

    # add pre-existing datasets
    for tract in (1, 3, 5):
        for patch in (2, 4, 6, 7):
            dataId = dict(skymap="DummyMap", tract=tract, patch=patch)
            registry.addDataset(mergeType, dataId=dataId, run=run)
            for aFilter in ("i", "r"):
                dataId = dict(skymap="DummyMap", tract=tract, patch=patch,
                              abstract_filter=aFilter)
                registry.addDataset(calexpType, dataId=dataId, run=run)

    # with empty expression
    builder = DataIdQueryBuilder.fromDimensions(registry, dimensions)
    builder.requireDataset(calexpType, collections=[collection])
    builder.requireDataset(mergeType, collections=[collection])
    rows = list(builder.execute())
    self.assertEqual(len(rows), 3*4*2)  # 3 tracts x 4 patches x 2 filters
    for dataId in rows:
        self.assertCountEqual(dataId.keys(),
                              ("skymap", "tract", "patch",
                               "abstract_filter"))
    self.assertCountEqual(set(dataId["tract"] for dataId in rows),
                          (1, 3, 5))
    self.assertCountEqual(set(dataId["patch"] for dataId in rows),
                          (2, 4, 6, 7))
    self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows),
                          ("i", "r"))

    # limit to 2 tracts and 2 patches
    builder = DataIdQueryBuilder.fromDimensions(registry, dimensions)
    builder.requireDataset(calexpType, collections=[collection])
    builder.requireDataset(mergeType, collections=[collection])
    builder.whereParsedExpression(
        "tract IN (1, 5) AND patch.patch IN (2, 7)")
    rows = list(builder.execute())
    self.assertEqual(len(rows), 2*2*2)  # 2 tracts x 2 patches x 2 filters
    self.assertCountEqual(set(dataId["tract"] for dataId in rows), (1, 5))
    self.assertCountEqual(set(dataId["patch"] for dataId in rows), (2, 7))
    self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows),
                          ("i", "r"))

    # limit to single filter
    builder = DataIdQueryBuilder.fromDimensions(registry, dimensions)
    builder.requireDataset(calexpType, collections=[collection])
    builder.requireDataset(mergeType, collections=[collection])
    builder.whereParsedExpression("abstract_filter = 'i'")
    rows = list(builder.execute())
    self.assertEqual(len(rows), 3*4*1)  # 3 tracts x 4 patches x 1 filter
    self.assertCountEqual(set(dataId["tract"] for dataId in rows),
                          (1, 3, 5))
    self.assertCountEqual(set(dataId["patch"] for dataId in rows),
                          (2, 4, 6, 7))
    self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows),
                          ("i",))

    # expression excludes everything, specifying non-existing skymap is
    # not a fatal error, it's operator error
    builder = DataIdQueryBuilder.fromDimensions(registry, dimensions)
    builder.requireDataset(calexpType, collections=[collection])
    builder.requireDataset(mergeType, collections=[collection])
    builder.whereParsedExpression("skymap = 'Mars'")
    rows = list(builder.execute())
    self.assertEqual(len(rows), 0)