def testCopy(self):
    """Copying entries between PropertyLists preserves arrays unless
    asScalar is requested, in which case only the last value is kept.
    """
    dest = dafBase.PropertyList()
    source = dafBase.PropertyList()

    floats = [1.5, 3.2]
    source.set("srcItem1", floats)
    dest.copy("destItem1", source, "srcItem1")
    self.assertEqual(dest.get("destItem1"), floats[-1])
    self.assertEqual(dest.getArray("destItem1"), floats)
    self.assertEqual(dest.getScalar("destItem1"), floats[-1])
    self.assertEqual(dest.valueCount(), 2)

    # A copy replaces an existing item even when the types differ.
    dest.set("destItem2", "string value")
    self.assertEqual(dest.valueCount(), 3)
    ints = [5, -4, 3]
    source.set("srcItem2", ints)
    dest.copy("destItem2", source, "srcItem2")
    self.assertEqual(dest.get("destItem2"), ints[-1])
    self.assertEqual(dest.getArray("destItem2"), ints)
    self.assertEqual(dest.getScalar("destItem2"), ints[-1])
    self.assertEqual(dest.valueCount(), 5)

    # With asScalar=True only the final value is copied across.
    dest.copy("destItem2Scalar", source, "srcItem2", asScalar=True)
    self.assertEqual(dest.get("destItem2Scalar"), ints[-1])
    self.assertEqual(dest.getArray("destItem2Scalar"), [ints[-1]])
    self.assertEqual(dest.getScalar("destItem2Scalar"), ints[-1])
    self.assertEqual(dest.valueCount(), 6)
def testCombineThrow(self):
    """combine() raises TypeError when value types conflict and when
    given a plain dict instead of a PropertyList.
    """
    apl = dafBase.PropertyList()
    apl.set("int", 42)

    # Same key, incompatible value type.
    conflicting = dafBase.PropertyList()
    conflicting.set("int", 3.14159)
    with self.assertRaises(TypeError):
        apl.combine(conflicting)

    # A bare dict is not an acceptable argument.
    with self.assertRaises(TypeError):
        apl.combine({"bool": True})
def testUpdate(self):
    """update() from another PropertyList replaces matching keys
    (arrays included) and adds new ones; update() from a dict behaves
    the same for scalar values.
    """
    apl = dafBase.PropertyList()
    apl.set("apl1.pre", 1)
    apl.set("apl1.post", 2)
    apl.set("int", 42)
    apl.set("double", 3.14)
    apl.set("apl2.plus", 10.24)
    apl.set("apl2.minus", -10.24)
    apl.set("apl3.sub.subsub", "foo")
    aplp = dafBase.PropertyList()
    aplp.set("apl1.pre", 3)
    aplp.add("apl1.pre", 4)
    aplp.set("int", 2008)
    aplp.set("apl2.foo", "bar")
    aplp.set("apl4.top", "bottom")
    apl.update(aplp)
    # Hierarchical names remain scalar leaves; only "apl1.pre" (a
    # two-element array brought in from aplp) is an array.
    self.assertFalse(apl.isArray("apl1"))
    self.assertTrue(apl.isArray("apl1.pre"))
    self.assertFalse(apl.isArray("apl1.post"))
    self.assertFalse(apl.isArray("apl2"))
    self.assertFalse(apl.isArray("apl2.plus"))
    self.assertFalse(apl.isArray("apl2.minus"))
    self.assertFalse(apl.isArray("apl2.foo"))
    self.assertFalse(apl.isArray("apl3"))
    self.assertFalse(apl.isArray("apl3.sub"))
    self.assertFalse(apl.isArray("apl3.subsub"))
    self.assertFalse(apl.isArray("apl4"))
    self.assertFalse(apl.isArray("apl4.top"))
    self.assertFalse(apl.isArray("int"))
    self.assertFalse(apl.isArray("double"))
    self.assertEqual(apl.valueCount("apl1.pre"), 2)
    self.assertEqual(apl.valueCount("int"), 1)
    # update() replaced the old values wholesale.
    v = apl.getArray("apl1.pre")
    self.assertEqual(v, [3, 4])
    v = apl.getArray("int")
    self.assertEqual(v, [2008])
    self.assertEqual(apl.valueCount(), 10)
    # Updating from a dict replaces "int" and adds two new keys.
    apld = {"int": 100, "str": "String", "apl1.foo": 10.5}
    apl.update(apld)
    self.assertEqual(apl["int"], apld["int"])
    self.assertEqual(apl["str"], apld["str"])
    self.assertEqual(apl["apl1.foo"], apld["apl1.foo"])
    self.assertEqual(apl["double"], 3.14)
    self.assertEqual(apl.valueCount(), 12)
def __init__(self, butler=None, **kwargs):
    """!Construct an ImageDifference Task

    @param[in] butler  Butler object to use in constructing reference
        object loaders
    """
    pipeBase.CmdLineTask.__init__(self, **kwargs)
    self.makeSubtask("subtract")
    self.makeSubtask("getTemplate")
    self.makeSubtask("decorrelate")
    if self.config.doUseRegister:
        self.makeSubtask("register")
    # Schema shared by the source-related subtasks; extended below.
    self.schema = afwTable.SourceTable.makeMinimalSchema()
    if self.config.doSelectSources:
        # Source selection needs a reference loader and an astrometric
        # matcher in addition to the selector itself.
        self.makeSubtask("sourceSelector", schema=self.schema)
        self.makeSubtask('refObjLoader', butler=butler)
        self.makeSubtask("astrometer", refObjLoader=self.refObjLoader)
    self.algMetadata = dafBase.PropertyList()
    if self.config.doDetection:
        self.makeSubtask("detection", schema=self.schema)
    if self.config.doMeasurement:
        # Dipole fitting configures measurement without the shared
        # algorithm metadata.
        if not self.config.doDipoleFitting:
            self.makeSubtask("measurement", schema=self.schema,
                             algMetadata=self.algMetadata)
        else:
            self.makeSubtask("measurement", schema=self.schema)
    if self.config.doMatchSources:
        self.schema.addField("refMatchId", "L",
                             "unique id of reference catalog match")
        self.schema.addField("srcMatchId", "L",
                             "unique id of source match")
def setUp(self):
    # Pick arbitrary numbers to create a detector object, and a synthetic
    # dataset.  The particular numbers have no special meaning to the test
    # and can be anything as long as they are self-consistent (i.e. the
    # fake source is inside the bounding box).
    self.center = lsst.afw.geom.Point2D(50.1, 49.8)
    self.bbox = lsst.afw.geom.Box2I(lsst.afw.geom.Point2I(-20, -30),
                                    lsst.afw.geom.Extent2I(140, 160))
    self.dataset = lsst.meas.base.tests.TestDataset(self.bbox)
    # One synthetic source of total flux 1e5 at the chosen center.
    self.dataset.addSource(100000.0, self.center)
    # Minimal TAN FITS header from which the WCS is constructed.
    md = dafBase.PropertyList()
    for k, v in (
        ("EQUINOX", 2000.0),
        ("CRPIX1", 5353.0),
        ("CRPIX2", -35.0),
        ("CD1_1", 0.0),
        ("CD1_2", -5.611E-05),
        ("CD2_1", -5.611E-05),
        ("CD2_2", -0.0),
        ("CRVAL1", 4.5789875),
        ("CRVAL2", 16.30004444),
        ("CUNIT1", 'deg'),
        ("CUNIT2", 'deg'),
        ("CTYPE1", 'RA---TAN'),
        ("CTYPE2", 'DEC--TAN'),
    ):
        md.set(k, v)
    self.wcs = afwImage.makeWcs(md)
def testGetScalarThrow(self):
    """Typed getters raise TypeError for mismatched types; missing keys
    raise KeyError through the mapping interface.
    """
    apl = dafBase.PropertyList()
    apl.setBool("bool", True)
    apl.setShort("short", 42)
    apl.setInt("int", 2008)
    apl.setLongLong("int64_t", 0xfeeddeadbeef)
    apl.setFloat("float", 3.14159)
    apl.setDouble("double", 2.718281828459045)
    apl.setString("string", "bar")

    with self.assertRaises(KeyError):
        apl["foo"]

    # Each (getter, key) pair requests a type that differs from the
    # stored one, so every call must raise TypeError.
    mismatches = [
        (apl.getBool, "short"),
        (apl.getBool, "int"),
        (apl.getShort, "int"),
        (apl.getInt, "short"),
        (apl.getInt, "bool"),
        (apl.getDouble, "float"),
        (apl.getFloat, "double"),
        (apl.getString, "int"),
    ]
    for getter, key in mismatches:
        with self.assertRaises(TypeError):
            getter(key)
def writeFits(self, fileName, flags=0):
    """Save our list of Backgrounds to a file

    @param fileName    FITS file to write
    @param flags       Flags to control details of writing; currently
                       unused, but present for consistency with
                       afw.table.BaseCatalog.writeFits.
    """
    for i, bkgd in enumerate(self):
        # Each entry is a (background, interpStyle, undersampleStyle)
        # tuple; unpack it in place.
        bkgd, interpStyle, undersampleStyle = bkgd
        statsImage = bkgd.getStatsImage()
        # Record the styles and original image bounding box in the
        # header so the background can be reconstructed on read.
        md = dafBase.PropertyList()
        md.set("INTERPSTYLE", interpStyle)
        md.set("UNDERSAMPLESTYLE", undersampleStyle)
        bbox = bkgd.getImageBBox()
        md.set("BKGD_X0", bbox.getMinX())
        md.set("BKGD_Y0", bbox.getMinY())
        md.set("BKGD_WIDTH", bbox.getWidth())
        md.set("BKGD_HEIGHT", bbox.getHeight())
        # First write creates the file; all later HDUs are appended.
        statsImage.getImage().writeFits(
            fileName, md, "w" if i == 0 else "a")
        statsImage.getMask().writeFits(
            fileName, md, "a")
        statsImage.getVariance().writeFits(fileName, md, "a")
def __init__(self, config=None, name=None, parentTask=None, log=None):
    """Construct a Task, either as a subtask of ``parentTask`` or as a
    top-level task.

    Parameters
    ----------
    config : task config, optional
        Configuration; defaults to the parent's matching sub-config for
        subtasks, or ``self.ConfigClass()`` for top-level tasks.
    name : `str`, optional
        Task name; required for subtasks.  For top-level tasks it
        defaults to the class attribute ``_DefaultName``.
    parentTask : optional
        Parent task, if this is a subtask.
    log : optional
        Logger whose name (if any) prefixes this task's logger name.

    Raises
    ------
    RuntimeError
        If ``name`` is omitted for a subtask, or for a top-level task
        with no ``_DefaultName`` attribute.
    """
    self.metadata = dafBase.PropertyList()
    self._parentTask = parentTask
    if parentTask is not None:
        if name is None:
            raise RuntimeError("name is required for a subtask")
        self._name = name
        self._fullName = parentTask._computeFullName(name)
        if config is None:
            config = getattr(parentTask.config, name)
        # Subtasks share the parent's task registry and logger prefix.
        self._taskDict = parentTask._taskDict
        loggerName = parentTask.log.getName() + '.' + name
    else:
        if name is None:
            name = getattr(self, "_DefaultName", None)
            if name is None:
                raise RuntimeError(
                    "name is required for a task unless it has attribute _DefaultName"
                )
            # Bug fix: removed redundant `name = self._DefaultName`
            # reassignment; getattr above already bound the same value.
        self._name = name
        self._fullName = self._name
        if config is None:
            config = self.ConfigClass()
        self._taskDict = dict()
        loggerName = self._fullName
        if log is not None and log.getName():
            loggerName = log.getName() + '.' + loggerName
    self.log = Log.getLogger(loggerName)
    self.config = config
    self._display = lsstDebug.Info(self.__module__).display
    self._taskDict[self._fullName] = self
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Gather the per-detector PhotoCalib objects for one visit into a
    single, detector-sorted ExposureCatalog and persist it.
    """
    visit = butlerQC.quantum.dataId['visit']
    schema = afwTable.ExposureTable.makeMinimalSchema()
    schema.addField('visit', type='L', doc='visit number')
    metadata = dafBase.PropertyList()
    metadata.add("COMMENT", "Catalog id is detector id, sorted")
    metadata.add("COMMENT", "Only detectors with data have entries")
    photoCalibCat = afwTable.ExposureCatalog(schema)
    photoCalibCat.setMetadata(metadata)
    # Pre-size the catalog: one record per input calibration.
    photoCalibCat.reserve(len(inputRefs.photoCalibList))
    photoCalibList = butlerQC.get(inputRefs.photoCalibList)
    for dataRef in photoCalibList:
        detector = dataRef.dataId['detector']
        photoCalib = dataRef.get()
        rec = photoCalibCat.addNew()
        # Record id is the detector id so the catalog can be searched
        # by detector after sorting.
        rec['id'] = detector
        rec['visit'] = visit
        rec.setPhotoCalib(photoCalib)
    # Sort by id (detector), as promised in the COMMENT metadata.
    photoCalibCat.sort()
    butlerQC.put(photoCalibCat, outputRefs.photoCalibGlobalCatalog)
def testGetVector(self):
    """Array values: get() returns the last element, getArray() the full
    list, and a scalar behaves like a one-element array.
    """
    apl = dafBase.PropertyList()
    ints = [42, 2008, 1]
    apl.setInt("ints", ints)
    apl.setInt("ints2", [10, 9, 8])
    fetched = apl.getArrayInt("ints")
    self.assertEqual(len(fetched), 3)
    self.assertEqual(ints, fetched)
    self.assertEqual(apl.getInt("ints2"), 8)
    self.assertEqual(apl.getArrayInt("ints2"), [10, 9, 8])
    # get() on an array yields the most recently added element.
    last = apl.get("ints")
    self.assertIsInstance(last, int)
    self.assertEqual(ints[-1], last)
    self.assertEqual(apl["ints"], ints[-1])
    self.assertEqual(apl.getArray("ints"), ints)
    self.assertEqual(apl.getScalar("ints"), ints[-1])
    self.assertEqual(apl.valueCount(), 6)
    # A scalar is also retrievable as a one-element array.
    apl.setInt("int", 999)
    scalar = apl.get("int")
    self.assertEqual(scalar, 999)
    self.assertEqual(apl.getArray("int"), [999])
    self.assertEqual(apl.getScalar("int"), 999)
    self.assertEqual(apl["int"], 999)
    self.assertEqual(apl.valueCount(), 7)
    self.checkPickle(apl)
def __init__(self, *args, **kwargs):
    """!Create the ImagePsfMatchTask

    \param *args arguments to be passed to lsst.ip.diffim.PsfMatchTask.__init__
    \param **kwargs keyword arguments to be passed to lsst.ip.diffim.PsfMatchTask.__init__

    Upon initialization, the kernel configuration is defined by
    self.config.kernel.active.  The task creates an lsst.afw.math.Warper
    from the subConfig self.config.kernel.active.warpingConfig.  A schema
    for the selection and measurement of candidate
    lsst.ip.diffim.KernelCandidates is defined, and used to initialize
    subTasks selectDetection (for candidate detection) and
    selectMeasurement (for candidate measurement).
    """
    PsfMatchTask.__init__(self, *args, **kwargs)
    self.kConfig = self.config.kernel.active
    self._warper = afwMath.Warper.fromConfig(self.kConfig.warpingConfig)
    # the background subtraction task uses a config from an unusual
    # location, so cannot easily be constructed with makeSubtask
    self.background = SubtractBackgroundTask(
        config=self.kConfig.afwBackgroundConfig, name="background",
        parentTask=self)
    # Shared schema/metadata for the candidate selection subtasks.
    self.selectSchema = afwTable.SourceTable.makeMinimalSchema()
    self.selectAlgMetadata = dafBase.PropertyList()
    self.makeSubtask("selectDetection", schema=self.selectSchema)
    self.makeSubtask("selectMeasurement", schema=self.selectSchema,
                     algMetadata=self.selectAlgMetadata)
def testScalar(self):
    """Exercise typed setters/getters for every supported scalar type,
    including None (undefined) values and float subclasses.
    """
    apl = dafBase.PropertyList()
    apl.setBool("bool", True)
    apl.setShort("short", 42)
    apl.setInt("int", 2008)
    apl.setLongLong("int64_t", 0xfeeddeadbeef)
    apl.setFloat("float", 3.14159)
    apl.setDouble("double", 2.718281828459045)
    apl.set("char*", "foo")
    apl.setString("string", "bar")
    apl.set("int2", 2009)
    apl.set(
        "dt",
        dafBase.DateTime("20090402T072639.314159265Z",
                         dafBase.DateTime.UTC))
    # A float subclass is accepted and stored as a double.
    apl.set("subclass", FloatSubClass(1.23456789))
    # None marks the entry as undefined rather than removing it.
    apl.set("undef", None)
    self.assertTrue(apl.isUndefined("undef"))
    self.assertFalse(apl.isUndefined("string"))
    # Each value is recorded with its exact declared type.
    self.assertEqual(apl.typeOf("bool"), dafBase.PropertyList.TYPE_Bool)
    self.assertEqual(apl.getBool("bool"), True)
    self.assertEqual(apl.typeOf("short"), dafBase.PropertyList.TYPE_Short)
    self.assertEqual(apl.getShort("short"), 42)
    self.assertEqual(apl.typeOf("int"), dafBase.PropertyList.TYPE_Int)
    self.assertEqual(apl.getInt("int"), 2008)
    self.assertEqual(apl.typeOf("int64_t"),
                     dafBase.PropertyList.TYPE_LongLong)
    self.assertEqual(apl.getLongLong("int64_t"), 0xfeeddeadbeef)
    self.assertEqual(apl.typeOf("float"), dafBase.PropertyList.TYPE_Float)
    self.assertAlmostEqual(apl.getFloat("float"), 3.14159, 6)
    self.assertEqual(apl.typeOf("double"),
                     dafBase.PropertyList.TYPE_Double)
    self.assertEqual(apl.getDouble("double"), 2.718281828459045)
    self.assertEqual(apl.typeOf("char*"), dafBase.PropertyList.TYPE_String)
    self.assertEqual(apl.getString("char*"), "foo")
    self.assertEqual(apl.typeOf("string"),
                     dafBase.PropertyList.TYPE_String)
    self.assertEqual(apl.getString("string"), "bar")
    self.assertEqual(apl.typeOf("int2"), dafBase.PropertyList.TYPE_Int)
    self.assertEqual(apl.getInt("int2"), 2009)
    self.assertEqual(apl.get("int2"), 2009)
    self.assertEqual(apl.getArray("int2"), [2009])
    self.assertEqual(apl.getScalar("int2"), 2009)
    self.assertEqual(apl.typeOf("dt"), dafBase.PropertyList.TYPE_DateTime)
    self.assertEqual(apl.getDateTime("dt").nsecs(), 1238657233314159265)
    self.assertEqual(apl.getDouble("subclass"), 1.23456789)
    self.assertEqual(apl["int2"], 2009)
    # Undefined entries read back as None through every accessor.
    self.assertIsNone(apl.getScalar("undef"))
    self.assertEqual(apl.typeOf("undef"), dafBase.PropertyList.TYPE_Undef)
    self.assertIsNone(apl.get("undef"))
    self.assertIsNone(apl["undef"])
    self.checkPickle(apl)
    # Now replace the undef value with a defined value
    apl.set("undef", "not undefined")
    self.assertEqual(apl.getScalar("undef"), "not undefined")
    self.assertFalse(apl.isUndefined("undef"))
    self.assertEqual(apl.typeOf("undef"),
                     dafBase.PropertyList.TYPE_String)
def testToOrderedDict(self):
    """toOrderedDict() preserves insertion order, collapses repeated
    COMMENT entries into a list, and keeps position on replacement.
    """
    from collections import OrderedDict

    apl = dafBase.PropertyList()
    apl.set("SIMPLE", True)
    apl.set("BITPIX", -32)
    apl.set("NAXIS", 2)
    apl.set("RA", 3.14159)
    apl.set("DEC", 2.71828)
    apl.set("COMMENT", "This is a test")
    apl.add("COMMENT", "This is a test line 2")
    expected = OrderedDict([
        ("SIMPLE", True),
        ("BITPIX", -32),
        ("NAXIS", 2),
        ("RA", 3.14159),
        ("DEC", 2.71828),
        ("COMMENT", ["This is a test", "This is a test line 2"]),
    ])
    self.assertEqual(apl.toOrderedDict(), expected)

    # A brand-new key is appended at the end.
    apl.set("NAXIS1", 513)
    expected["NAXIS1"] = 513
    self.assertEqual(apl.toOrderedDict(), expected)

    # Replacing existing keys keeps their original position.
    for key, value in (("RA", 1.414), ("DEC", 1.732), ("DEC", -6.28)):
        apl.set(key, value)
        expected[key] = value
        self.assertEqual(apl.toOrderedDict(), expected)

    # Additional COMMENTs extend the existing list.
    apl.add("COMMENT", "This is a test line 3")
    expected["COMMENT"] = expected["COMMENT"] + ["This is a test line 3"]
    self.assertEqual(apl.toOrderedDict(), expected)
def testRejectBlends(self):
    """Test the PcaPsfDeterminer blend removal

    We give it a single blended source, asking it to remove blends, and
    check that it barfs in the expected way.
    """
    factory = measAlg.psfDeterminerRegistry["pca"]
    config = factory.ConfigClass()
    config.doRejectBlends = True
    psfDeterminer = factory(config)

    schema = afwTable.SourceTable.makeMinimalSchema()
    schema.addField("position", afwGeom.Point2D, doc="Position")
    schema.addField("flux.psf", float, doc="psf")
    schema.addField("flux.psf.flags", "Flag", doc="psf")
    catalog = afwTable.SourceCatalog(schema)
    catalog.defineCentroid("position")
    catalog.definePsfFlux("flux.psf")
    # A footprint carrying two peaks marks the source as a blend.
    source = catalog.addNew()
    foot = afwDetection.Footprint(afwGeom.Point2I(123, 45), 6,
                                  self.exposure.getBBox())
    foot.addPeak(123, 45, 6)
    foot.addPeak(126, 47, 5)
    source.setFootprint(foot)

    candidates = [measAlg.makePsfCandidate(source, self.exposure)]
    metadata = dafBase.PropertyList()

    with self.assertRaises(RuntimeError) as cm:
        psfDeterminer.determinePsf(self.exposure, candidates, metadata)
    # Bug fix: Python 3 exceptions have no ``.message`` attribute; use
    # str() to obtain the message text.
    self.assertEqual(str(cm.exception),
                     "All PSF candidates removed as blends")
def writeFits(self, fileName, flags=0):
    """Save our list of Backgrounds to a file.

    Parameters
    ----------
    fileName : `str`
        FITS file to write
    flags : `int`
        Flags to control details of writing; currently unused, but
        present for consistency with
        `lsst.afw.table.BaseCatalog.writeFits`.
    """
    for index, item in enumerate(self):
        (bkgd, interpStyle, undersampleStyle, approxStyle,
         approxOrderX, approxOrderY, approxWeighting) = item
        # Record interpolation/approximation settings and the original
        # image bounding box so the background can be reconstructed.
        header = dafBase.PropertyList()
        header.set("INTERPSTYLE", int(interpStyle))
        header.set("UNDERSAMPLESTYLE", int(undersampleStyle))
        header.set("APPROXSTYLE", int(approxStyle))
        header.set("APPROXORDERX", approxOrderX)
        header.set("APPROXORDERY", approxOrderY)
        header.set("APPROXWEIGHTING", approxWeighting)
        bbox = bkgd.getImageBBox()
        header.set("BKGD_X0", bbox.getMinX())
        header.set("BKGD_Y0", bbox.getMinY())
        header.set("BKGD_WIDTH", bbox.getWidth())
        header.set("BKGD_HEIGHT", bbox.getHeight())
        # First image creates the file; every later HDU is appended.
        mode = "w" if index == 0 else "a"
        statsImage = bkgd.getStatsImage()
        statsImage.getImage().writeFits(fileName, header, mode)
        statsImage.getMask().writeFits(fileName, header, "a")
        statsImage.getVariance().writeFits(fileName, header, "a")
def prepCatalog(self, inputs):
    """Prepare and return the output catalog.

    Each selected input source is copied ``self.config.multiplicity``
    times into a new SourceCatalog built on ``self.schema``.

    Parameters
    ----------
    inputs : struct with a ``sources`` attribute
        Input sources to copy.

    Returns
    -------
    outCat : `lsst.afw.table.SourceCatalog`
        The prepared output catalog.
    iRecords : `list` of `int`
        Index of the input source each output record was copied from.
    """
    outCat = lsst.afw.table.SourceCatalog(self.schema)
    metadata = dafBase.PropertyList()
    outCat.getTable().setMetadata(metadata)
    srcCat = inputs.sources
    # Cleanup: removed unused locals (exposurePsf, exposureCalib) and
    # a line of commented-out dead code.
    # SchemaMapper will transfer ID, Coord, Footprint
    mapper = lsst.afw.table.SchemaMapper(srcCat.schema, self.schema)
    # create output catalog
    iRecords = []
    for i, srcRecord in enumerate(srcCat):
        # Only an explicit False rejects a source; other falsy returns
        # are accepted, matching the historical behavior.
        if self.selection(srcRecord, srcRecord) is False:
            continue
        # Emit `multiplicity` copies of each selected source.
        for _ in range(self.config.multiplicity):
            outRecord = outCat.addNew()
            iRecords.append(i)
            outRecord.assign(srcRecord, mapper)
            outRecord.setCoord(srcRecord.getCoord())
            outRecord.setId(srcRecord.getId())
    return outCat, iRecords
def __init__(self):
    """Container for astrometric-solution products; all fields start
    unset and are filled in as the solve proceeds.
    """
    # Intermediate/final solver outputs, initially absent.
    for field in ("tanWcs", "tanMatches", "sipWcs", "sipMatches",
                  "refCat", "solveQa"):
        setattr(self, field, None)
    # Match metadata is always present, even when empty.
    self.matchMeta = dafBase.PropertyList()
def testComment(self):
    """Comments attached via set() are retrievable and are replaced
    along with the value on a subsequent set().
    """
    apl = dafBase.PropertyList()
    for value, note in ((2, "two-dimensional"),
                        (3, "three-dimensional")):
        apl.set("NAXIS", value, note)
        self.assertEqual(apl.get("NAXIS"), value)
        self.assertEqual(apl.getComment("NAXIS"), note)
def testDateTimeToString(self):
    """A DateTime value is rendered in ISO form by toString()."""
    apl = dafBase.PropertyList()
    when = dafBase.DateTime("20090402T072639.314159265Z",
                            dafBase.DateTime.UTC)
    apl.set("dt", when)
    self.assertEqual(apl.toString(),
                     "dt = 2009-04-02T07:26:39.314159265Z\n")
def testCombineHierarchical(self):
    """deepCopy of a PropertyList holding a hierarchical ('.') key must
    succeed and preserve the value.

    Regression test: this was a segfault prior to addressing DM-882.
    """
    original = dafBase.PropertyList()
    original.set("a.b", 1)
    duplicate = original.deepCopy()  # should not segfault
    self.assertEqual(original.get("a.b"), duplicate.get("a.b"))
def __init__(self):
    """Build a minimal SimpleCatalog with a 'radius' field and expose
    its table and addNew method for catalog-like use.
    """
    maskSchema = afwTable.SimpleTable.makeMinimalSchema()
    maskSchema.addField("radius", "Angle", "radius of mask")
    self._catalog = afwTable.SimpleCatalog(maskSchema)
    self._catalog.table.setMetadata(dafBase.PropertyList())
    # Convenience aliases so callers can treat this object as a catalog.
    self.table = self._catalog.table
    self.addNew = self._catalog.addNew
def __init__(self, *args, **kwargs):
    """Construct the task and its repair, diffim, detection and
    (optionally) measurement subtasks.
    """
    pipeBase.Task.__init__(self, *args, **kwargs)
    self.makeSubtask("repair")
    self.makeSubtask("diffim")
    # Schema shared by detection and measurement; measurement records
    # its algorithm metadata in self.algMetadata.
    self.schema = afwTable.SourceTable.makeMinimalSchema()
    self.algMetadata = dafBase.PropertyList()
    self.makeSubtask("detection", schema=self.schema)
    if self.config.doMeasurement:
        self.makeSubtask("measurement", schema=self.schema,
                         algMetadata=self.algMetadata)
def testGetAs(self):
    """getAsXxx() methods perform widening numeric conversions
    (bool -> int -> int64 -> double) but refuse narrowing or
    cross-kind conversions.
    """
    apl = dafBase.PropertyList()
    apl.set("bool", True)
    s = 42
    apl.setShort("short", s)
    apl.set("int", 2008)
    apl.set("int64_t", 0xfeeddeadbeef)
    f = 3.14159
    apl.setFloat("float", f)
    d = 2.718281828459045
    apl.setDouble("double", d)
    apl.setString("char*", "foo")
    apl.set("char*2", "foo2")
    apl.set("string", "bar")
    # Nested PropertyList, reachable below via a dotted name.
    aplp = dafBase.PropertyList()
    aplp.set("bottom", "x")
    apl.set("top", aplp)
    self.assertEqual(apl.getAsBool("bool"), True)
    self.assertEqual(apl.getAsInt("bool"), 1)
    self.assertEqual(apl.getAsInt("short"), 42)
    self.assertEqual(apl.getAsInt("int"), 2008)
    # 64-bit values do not narrow to int.
    with self.assertRaises(TypeError):
        apl.getAsInt("int64_t")
    self.assertEqual(apl.getAsInt64("bool"), 1)
    self.assertEqual(apl.getAsInt64("short"), 42)
    self.assertEqual(apl.getAsInt64("int"), 2008)
    self.assertEqual(apl.getAsInt64("int64_t"), 0xfeeddeadbeef)
    # Floating-point values do not convert to integers.
    with self.assertRaises(TypeError):
        apl.getAsInt64("float")
    self.assertEqual(apl.getAsDouble("bool"), 1.0)
    self.assertEqual(apl.getAsDouble("short"), 42.0)
    self.assertEqual(apl.getAsDouble("int"), 2008.0)
    self.assertEqual(apl.getAsDouble("int64_t"), float(0xfeeddeadbeef))
    self.assertAlmostEqual(apl.getAsDouble("float"), 3.14159, places=5)
    self.assertEqual(apl.getAsDouble("double"), 2.718281828459045)
    # Strings and numbers never convert to one another.
    with self.assertRaises(TypeError):
        apl.getAsDouble("char*")
    self.assertEqual(apl.getAsString("char*"), "foo")
    self.assertEqual(apl.getAsString("char*2"), "foo2")
    self.assertEqual(apl.getAsString("string"), "bar")
    with self.assertRaises(TypeError):
        apl.getAsString("int")
    # Nested values are addressed with dotted names.
    self.assertEqual(apl.getAsString("top.bottom"), "x")
def testPiffDeterminer(self):
    """Test the (Piff) psfDeterminer on subImages"""
    self.setupDeterminer(self.exposure)
    metadata = dafBase.PropertyList()
    stars = self.starSelector.run(self.catalog, exposure=self.exposure)
    psfCandidateList = self.makePsfCandidates.run(
        stars.sourceCat, exposure=self.exposure).psfCandidates
    psf, cellSet = self.psfDeterminer.determinePsf(self.exposure,
                                                   psfCandidateList,
                                                   metadata,
                                                   flagKey=self.usePsfFlag)
    self.exposure.setPsf(psf)
    # The determiner reports its bookkeeping through the metadata.
    self.assertEqual(len(psfCandidateList), metadata['numAvailStars'])
    self.assertEqual(sum(self.catalog['use_psf']),
                     metadata['numGoodStars'])
    # Test how well we can subtract the PSF model
    self.subtractStars(self.exposure, self.catalog, chi_lim=5.6)
    # Test bboxes: the three bbox accessors must agree with the images
    # actually produced at each test point.
    for point in [
            psf.getAveragePosition(),
            geom.Point2D(),
            geom.Point2D(1, 1)
    ]:
        self.assertEqual(psf.computeBBox(point),
                         psf.computeKernelImage(point).getBBox())
        self.assertEqual(psf.computeKernelBBox(point),
                         psf.computeKernelImage(point).getBBox())
        self.assertEqual(psf.computeImageBBox(point),
                         psf.computeImage(point).getBBox())
    # Some roundtrips: FITS persistence and deepcopy must both yield a
    # PSF producing the same images as the original.
    with lsst.utils.tests.getTempFilePath(".fits") as tmpFile:
        self.exposure.writeFits(tmpFile)
        fitsIm = afwImage.ExposureF(tmpFile)
        copyIm = copy.deepcopy(self.exposure)
        for newIm in [fitsIm, copyIm]:
            # Piff doesn't enable __eq__ for its results, so we just
            # check that some PSF images come out the same.
            for point in [
                    geom.Point2D(0, 0),
                    geom.Point2D(10, 100),
                    geom.Point2D(-200, 30),
                    geom.Point2D(float("nan"))  # "nullPoint"
            ]:
                self.assertImagesAlmostEqual(
                    psf.computeImage(point),
                    newIm.getPsf().computeImage(point))
            # Also check using default position
            self.assertImagesAlmostEqual(psf.computeImage(),
                                         newIm.getPsf().computeImage())
def prepCatalog(self, inputs):
    """Prepare and return the output catalog

    Builds a SourceCatalog on ``self.schema``, records clipped noise
    statistics (and optional correlated-noise data) in its metadata,
    then copies each input source into it, optionally growing the
    footprints.
    """
    outCat = lsst.afw.table.SourceCatalog(self.schema)
    metadata = dafBase.PropertyList()
    outCat.getTable().setMetadata(metadata)
    srcCat = inputs.sources
    # NOTE(review): these two values are never used in this method —
    # possibly leftovers from an earlier revision; confirm.
    exposurePsf = inputs.exposure.getPsf()
    exposureCalib = inputs.exposure.getCalib()
    # SchemaMapper will transfer ID, Coord, Footprint
    mapper = lsst.afw.table.SchemaMapper(srcCat.schema, self.schema)
    # Calculate the noise variance in the image
    sctrl = lsst.afw.math.StatisticsControl()
    sctrl.setNumIter(3)
    sctrl.setNumSigmaClip(5.0)
    # NOTE(review): `mask` is only referenced by the commented-out code
    # below, and `sctrl` is configured but never passed to
    # makeStatistics — confirm whether it should be.
    mask = inputs.exposure.getMaskedImage().getMask()
    image = inputs.exposure.getMaskedImage().getImage()
    #sctrl.setAndMask(map(lambda x, y: x | mask.getPlaneBitMask(y),
    #                     afwImage.Mask().getMaskPlaneDict().keys(), 0x0))
    sctrl.setNanSafe(True)
    stats = lsst.afw.math.makeStatistics(
        image, lsst.afw.math.VARIANCECLIP | lsst.afw.math.MEANCLIP)
    variance = stats.getValue(lsst.afw.math.VARIANCECLIP)
    mean = stats.getValue(lsst.afw.math.MEANCLIP)
    outCat.getTable().getMetadata().set('noise_mean', mean)
    outCat.getTable().getMetadata().set('noise_variance', variance)
    if self.config.useCorrelatedNoise:
        # Two-column file; presumably k values and power-spectrum data
        # — TODO confirm against the file format.
        data = numpy.genfromtxt(self.config.correlatedNoiseFile)
        outCat.getTable().getMetadata().set('kData', data[:, 0])
        outCat.getTable().getMetadata().set('psData', data[:, 1])
    if self.config.calculateVariance:
        # Offset that shifts exposure coordinates to a zero-based origin.
        x0 = inputs.exposure.getXY0().getX()
        y0 = inputs.exposure.getXY0().getY()
        self.xy0 = afwGeom.Extent2I(-x0, -y0)
    for srcRecord in srcCat:
        outRecord = outCat.addNew()
        #outRecord.set(self.psfSizeKey, psfSize)
        # Start by setting some miscellaneous calculated fields
        outRecord.assign(srcRecord, mapper)
        outRecord.setCoord(srcRecord.getCoord())
        # Increase footprint region?
        if self.config.nGrow > 0:
            outRecord.setFootprint(
                self.setupFitRegion(inputs.exposure, srcRecord,
                                    self.config.nGrow))
    return outCat
def run(self, inputExp):
    """Mask ISR processed FLAT exposures to ensure consistent statistics.

    Parameters
    ----------
    inputExp : `lsst.afw.image.Exposure`
        Post-ISR processed exposure to measure.

    Returns
    -------
    outputStats : `lsst.daf.base.PropertyList`
        List containing the statistics.
    """
    if self.config.doVignette:
        # Flag vignetted pixels as BAD without altering pixel values.
        VignetteExposure(inputExp, doUpdateMask=True, maskPlane='BAD',
                         doSetValue=False, log=self.log)
    mask = inputExp.getMask()
    maskVal = mask.getPlaneBitMask(self.config.maskNameList)
    statsControl = afwMath.StatisticsControl(self.config.numSigmaClip,
                                             self.config.clipMaxIter,
                                             maskVal)
    statsControl.setAndMask(maskVal)
    outputStats = dafBase.PropertyList()
    # Detector level:
    stats = afwMath.makeStatistics(
        inputExp.getMaskedImage(),
        afwMath.MEANCLIP | afwMath.STDEVCLIP | afwMath.NPOINT,
        statsControl)
    # NOTE(review): the *_MEDIAN keys actually hold the clipped mean
    # (MEANCLIP), not a median — confirm whether the key name or the
    # statistic is intended.  The keys are an external contract, so
    # they are left unchanged here.
    outputStats['DETECTOR_MEDIAN'] = stats.getValue(afwMath.MEANCLIP)
    outputStats['DETECTOR_SIGMA'] = stats.getValue(afwMath.STDEVCLIP)
    outputStats['DETECTOR_N'] = stats.getValue(afwMath.NPOINT)
    self.log.info("Stats: median=%f sigma=%f n=%d",
                  outputStats['DETECTOR_MEDIAN'],
                  outputStats['DETECTOR_SIGMA'],
                  outputStats['DETECTOR_N'])
    # AMP LEVEL: repeat the clipped statistics per amplifier subimage.
    for ampIdx, amp in enumerate(inputExp.getDetector()):
        ampName = amp.getName()
        ampExp = inputExp.Factory(inputExp, amp.getBBox())
        stats = afwMath.makeStatistics(
            ampExp.getMaskedImage(),
            afwMath.MEANCLIP | afwMath.STDEVCLIP | afwMath.NPOINT,
            statsControl)
        outputStats[f'AMP_NAME_{ampIdx}'] = ampName
        outputStats[f'AMP_MEDIAN_{ampIdx}'] = stats.getValue(
            afwMath.MEANCLIP)
        outputStats[f'AMP_SIGMA_{ampIdx}'] = stats.getValue(
            afwMath.STDEVCLIP)
        outputStats[f'AMP_N_{ampIdx}'] = stats.getValue(afwMath.NPOINT)
    return pipeBase.Struct(outputStats=outputStats)
def testdeepCopy(self):
    """deepCopy() must duplicate nested PropertyLists so that later
    mutation of the original leaves the copy untouched.
    """
    original = dafBase.PropertyList()
    original.set("int", 42)
    nested = dafBase.PropertyList()
    nested.set("bottom", "x")
    original.set("top", nested)

    duplicate = original.deepCopy()
    self.assertTrue(duplicate.exists("int"))
    self.assertTrue(duplicate.exists("top.bottom"))
    self.assertEqual(duplicate.getAsInt("int"), 42)
    self.assertEqual(duplicate.getAsString("top.bottom"), "x")

    # Make sure it was indeed a deep copy: mutate the original and
    # verify the duplicate still holds the old values.
    original.set("int", 2008)
    original.set("top.bottom", "z")
    self.assertEqual(original.getAsInt("int"), 2008)
    self.assertEqual(original.getAsString("top.bottom"), "z")
    self.assertEqual(duplicate.getAsInt("int"), 42)
    self.assertEqual(duplicate.getAsString("top.bottom"), "x")
def testAddVector(self):
    """add() with a list appends to an existing array, and float
    subclass instances are stored and returned as given.
    """
    apl = dafBase.PropertyList()
    initial = [42, 2008, 1]
    apl.set("ints", initial)
    apl.add("ints", [-42, -2008, -1])
    floats = [FloatSubClass(1.23), FloatSubClass(4.56),
              FloatSubClass(7.89)]
    apl.add("subclass", floats)
    self.assertEqual(apl.getArrayInt("ints"),
                     [42, 2008, 1, -42, -2008, -1])
    self.assertEqual(apl.get("subclass"), floats)
def setUp(self):
    """Create two empty source catalogs sharing a table with a PSF flux
    definition, plus an empty metadata PropertyList.
    """
    schema = afwTable.SourceTable.makeMinimalSchema()
    # Two float fields plus a flag, forming the "flux" PSF-flux slot.
    for fieldName in ("flux_flux", "flux_fluxSigma"):
        schema.addField(fieldName, type=np.float64)
    schema.addField("flux_flag", type="Flag")
    self.table = afwTable.SourceTable.make(schema)
    self.table.definePsfFlux("flux")
    self.ss1 = afwTable.SourceCatalog(self.table)
    self.ss2 = afwTable.SourceCatalog(self.table)
    self.metadata = dafBase.PropertyList()
def testCD_PC(self):
    """Test that we can read a FITS file with both CD and PC keys (like
    early Suprimecam files).
    """
    md = dafBase.PropertyList()
    for k, v in (
        ("EQUINOX", 2000.0),
        ("RADESYS", 'FK5'),
        ("CRPIX1", 5353.0),
        ("CRPIX2", -35.0),
        ("CD1_1", 0.0),
        ("CD1_2", -5.611E-05),
        ("CD2_1", -5.611E-05),
        ("CD2_2", -0.0),
        ("CRVAL1", 4.5789875),
        ("CRVAL2", 16.30004444),
        ("CUNIT1", 'deg'),
        ("CUNIT2", 'deg'),
        ("CTYPE1", 'RA---TAN'),
        ("CTYPE2", 'DEC--TAN'),
        ("CDELT1", -5.611E-05),
        ("CDELT2", 5.611E-05),
    ):
        md.set(k, v)

    wcs = afwImage.makeWcs(md)

    x, y = 1000, 2000
    ra, dec = 4.459815023498577, 16.544199850984768

    sky = wcs.pixelToSky(x, y)
    for i, v in enumerate([ra, dec]):
        self.assertEqual(sky[i].asDegrees(), v)

    for badPC in (False, True):
        if verbose:
            # Bug fix: was a Python 2 print statement, a syntax error
            # under Python 3.
            print("Checking PC coefficients: badPC =", badPC)
        for k, v in (
            ("PC001001", 0.0),
            ("PC001002", -1.0 if badPC else 1.0),
            ("PC002001", 1.0 if badPC else -1.0),
            ("PC002002", 0.0),
        ):
            md.set(k, v)

        # Check Greisen and Calabretta A&A 395 1061 (2002), Eq. 3
        if not badPC:
            for i in (1, 2,):
                for j in (1, 2,):
                    self.assertEqual(
                        md.get("CD%d_%d" % (i, j)),
                        md.get("CDELT%d" % i)*md.get("PC00%d00%d" % (i, j)))

        wcs = afwImage.makeWcs(md)
        sky = wcs.pixelToSky(x, y)
        for i, v in enumerate([ra, dec]):
            self.assertEqual(sky[i].asDegrees(), v)