def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
    """Pack the exposure ID, attach reference-object loaders, run the task,
    and optionally normalize the astrometric matches for persistence.

    Parameters
    ----------
    inputData : `dict`
        Keyword arguments for `run`; augmented here with 'exposureIdInfo'.
    inputDataIds : `dict`
        Data IDs of the inputs; 'exposure' is used to pack the exposure ID,
        'astromRefCat' / 'photoRefCat' list the reference-catalog shards.
    outputDataIds : `dict`
        Data IDs of the outputs (unused here).
    butler : butler used to pack data IDs and by the reference loaders.

    Returns
    -------
    results : struct returned by `run`, with `matches` (and optionally
        `matchesDenormalized`) attached when config.doWriteMatches is set.
    """
    expId, expBits = butler.registry.packDataId("visit_detector",
                                                inputDataIds['exposure'],
                                                returnMaxBits=True)
    inputData['exposureIdInfo'] = ExposureIdInfo(expId, expBits)

    if self.config.doAstrometry:
        refObjLoader = ReferenceObjectLoader(dataIds=inputDataIds['astromRefCat'],
                                             butler=butler,
                                             config=self.config.astromRefObjLoader,
                                             log=self.log)
        self.pixelMargin = refObjLoader.config.pixelMargin
        self.astrometry.setRefObjLoader(refObjLoader)

    if self.config.doPhotoCal:
        # Keyword arguments for consistency with the astrometry loader above
        # (the original constructed this one positionally in the same order).
        photoRefObjLoader = ReferenceObjectLoader(dataIds=inputDataIds['photoRefCat'],
                                                  butler=butler,
                                                  config=self.config.photoRefObjLoader,
                                                  log=self.log)
        self.pixelMargin = photoRefObjLoader.config.pixelMargin
        self.photoCal.match.setRefObjLoader(photoRefObjLoader)

    results = self.run(**inputData)

    if self.config.doWriteMatches:
        # Normalized (packed) matches carry only IDs plus metadata.
        normalizedMatches = afwTable.packMatches(results.astromMatches)
        normalizedMatches.table.setMetadata(results.matchMeta)
        if self.config.doWriteMatchesDenormalized:
            results.matchesDenormalized = denormalizeMatches(results.astromMatches,
                                                             results.matchMeta)
        results.matches = normalizedMatches
    return results
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch quantum inputs, configure reference-object loaders, run the
    task, and persist the outputs (normalizing matches when requested).
    """
    inputs = butlerQC.get(inputRefs)
    expId, expBits = butlerQC.quantum.dataId.pack("visit_detector", returnMaxBits=True)
    inputs['exposureIdInfo'] = ExposureIdInfo(expId, expBits)

    def _makeLoader(refs, refCats, loaderConfig):
        # Build a loader over the reference-catalog shards for this quantum.
        return ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId for ref in refs],
                                     refCats=refCats,
                                     config=loaderConfig,
                                     log=self.log)

    if self.config.doAstrometry:
        astromLoader = _makeLoader(inputRefs.astromRefCat,
                                   inputs.pop('astromRefCat'),
                                   self.config.astromRefObjLoader)
        self.pixelMargin = astromLoader.config.pixelMargin
        self.astrometry.setRefObjLoader(astromLoader)

    if self.config.doPhotoCal:
        photoLoader = _makeLoader(inputRefs.photoRefCat,
                                  inputs.pop('photoRefCat'),
                                  self.config.photoRefObjLoader)
        self.pixelMargin = photoLoader.config.pixelMargin
        self.photoCal.match.setRefObjLoader(photoLoader)

    outputs = self.run(**inputs)

    if self.config.doWriteMatches and self.config.doAstrometry:
        packed = afwTable.packMatches(outputs.astromMatches)
        packed.table.setMetadata(outputs.matchMeta)
        if self.config.doWriteMatchesDenormalized:
            outputs.matchesDenormalized = denormalizeMatches(outputs.astromMatches,
                                                             outputs.matchMeta)
        outputs.matches = packed

    butlerQC.put(outputs, outputRefs)
def testIdentity(self):
    """Matching two catalogs of coincident sources finds every pair, and
    packMatches/unpackMatches round-trips IDs and distances exactly.
    """
    nobj = 1000
    for i in range(nobj):
        # Same coordinates in both catalogs; IDs offset so they are distinct.
        coord = (10 + 0.001*i) * afwGeom.degrees
        s = self.ss1.addNew()
        s.setId(i)
        s.set(afwTable.SourceTable.getCoordKey().getRa(), coord)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), coord)
        s = self.ss2.addNew()
        s.setId(2*nobj + i)
        s.set(afwTable.SourceTable.getCoordKey().getRa(), coord)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), coord)

    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, False)
    self.assertEqual(len(mat), nobj)

    cat = afwTable.packMatches(mat)
    mat2 = afwTable.unpackMatches(cat, self.ss1, self.ss2)
    for m1, m2, c in zip(mat, mat2, cat):
        self.assertEqual(m1.first, m2.first)
        self.assertEqual(m1.second, m2.second)
        self.assertEqual(m1.distance, m2.distance)
        self.assertEqual(m1.first.getId(), c["first"])
        self.assertEqual(m1.second.getId(), c["second"])
        self.assertEqual(m1.distance, c["distance"])
    # Removed dead `if False:` debug block: it contained a Python 2 `print`
    # statement, which is a syntax error under Python 3.
def testJoin(self):
    """Normalized matches joined back against the source catalog reproduce
    the original match list exactly (records are not deep-copied).
    """
    res = self.getAstrometrySolution()
    normalized = packMatches(res.matches)
    normalized.table.setMetadata(res.matchMeta)
    rejoined = self.astrom.refObjLoader.joinMatchListWithCatalog(normalized, self.srcSet)

    self.assertEqual(len(rejoined), len(res.matches))
    for orig, joined in zip(res.matches, rejoined):
        self.assertEqual(joined.second.table, orig.second.table)
        self.assertEqual(joined.second.getId(), orig.second.getId())
        # No deep copying, so we can compare pointers.
        self.assertEqual(joined.second, orig.second)
        self.assertEqual(joined.first.getId(), orig.first.getId())
        self.assertEqual(joined.first.getRa().asDegrees(), orig.first.getRa().asDegrees())
        self.assertEqual(joined.first.getDec().asDegrees(), orig.first.getDec().asDegrees())
        self.assertEqual(joined.first.get("i_flux"), orig.first.get("i_flux"))
def testTicket2080(self):
    """Regression test: unpackMatches with an empty second catalog must not
    crash (ticket 2080).
    """
    packed = afwTable.packMatches(self.matches)
    firstCat = self.cat1.copy()
    firstCat.sort()
    emptyCat = afwTable.SimpleCatalog(self.schema)
    emptyCat.sort()
    # Just test that the next line doesn't segv.
    afwTable.unpackMatches(packed, firstCat, emptyCat)
def testIO(self):
    """Packed matches survive a FITS write/read round trip."""
    afwTable.packMatches(self.matches).writeFits(self.filename)
    readBack = afwTable.BaseCatalog.readFits(self.filename)

    # unpackMatches requires sorted catalogs to look up records by ID.
    cat1 = self.cat1.copy()
    cat2 = self.cat2.copy()
    cat1.sort()
    cat2.sort()
    self.testMatches(afwTable.unpackMatches(readBack, cat1, cat2))
def testIO(self):
    """Packed matches survive a FITS write/read round trip via a temp file."""
    packed = afwTable.packMatches(self.matches)
    with lsst.utils.tests.getTempFilePath(".fits") as filename:
        packed.writeFits(filename)
        readBack = afwTable.BaseCatalog.readFits(filename)

    # unpackMatches requires sorted catalogs to look up records by ID.
    cat1 = self.cat1.copy()
    cat2 = self.cat2.copy()
    cat1.sort()
    cat2.sort()
    self.testMatches(afwTable.unpackMatches(readBack, cat1, cat2))
def testIdentity(self):
    """Match slightly offset catalogs with both the old and new matchRaDec
    APIs, then verify pack/unpack round-trip, pickling, and the
    to/from-catalog conversion.
    """
    nobj = 1000
    # Hoist the key lookups out of the loop (loop-invariant).
    raKey = afwTable.SourceTable.getCoordKey().getRa()
    decKey = afwTable.SourceTable.getCoordKey().getDec()
    for i in range(nobj):
        s = self.ss1.addNew()
        s.setId(i)
        s.set(raKey, (10 + 0.001 * i) * afwGeom.degrees)
        s.set(decKey, (10 + 0.001 * i) * afwGeom.degrees)
        s = self.ss2.addNew()
        s.setId(2 * nobj + i)
        # Give slight offsets for Coord testing of matches to/from catalog in
        # checkMatchToFromCatalog(). Chosen such that the maximum offset
        # (nobj*1E-7 deg = 0.36 arcsec) is within the maximum distance
        # (1 arcsec) in afwTable.matchRaDec.
        s.set(raKey, (10 + 0.0010001 * i) * afwGeom.degrees)
        s.set(decKey, (10 + 0.0010001 * i) * afwGeom.degrees)

    # Old API (pre DM-855)
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, False)
    self.assertEqual(len(mat), nobj)
    # New API
    mc = afwTable.MatchControl()
    mc.findOnlyClosest = False
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, mc)
    self.assertEqual(len(mat), nobj)

    cat = afwTable.packMatches(mat)
    mat2 = afwTable.unpackMatches(cat, self.ss1, self.ss2)
    for m1, m2, c in zip(mat, mat2, cat):
        self.assertEqual(m1.first, m2.first)
        self.assertEqual(m1.second, m2.second)
        self.assertEqual(m1.distance, m2.distance)
        self.assertEqual(m1.first.getId(), c["first"])
        self.assertEqual(m1.second.getId(), c["second"])
        self.assertEqual(m1.distance, c["distance"])

    self.checkPickle(mat, checkSlots=False)
    self.checkPickle(mat2, checkSlots=False)
    self.checkMatchToFromCatalog(mat, cat)
    # Removed dead `if False:` debug-print block.
def writeMatches(self, dataRef, exposure, sources):
    """Persist matches of ``sources`` to the astrometric reference catalog.

    The Wcs attached to ``exposure`` is used to perform the match.

    dataRef: data reference used for persistence
    exposure: exposure with Wcs
    sources: source catalog to match
    """
    result = self.astrometry.astrometer.useKnownWcs(sources, exposure=exposure)
    if not result.matches:
        # Nothing matched; nothing to persist.
        return
    packed = afwTable.packMatches(result.matches)
    packed.table.setMetadata(result.matchMetadata)
    dataRef.put(packed, self.config.coaddName + "Coadd_srcMatch")
def testIdentity(self):
    """Match slightly offset catalogs with old and new matchRaDec APIs and
    verify pack/unpack round-trip, pickling, and to/from-catalog conversion.
    """
    nobj = 1000
    for i in range(nobj):
        s = self.ss1.addNew()
        s.setId(i)
        s.set(afwTable.SourceTable.getCoordKey().getRa(), (10 + 0.001*i) * afwGeom.degrees)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), (10 + 0.001*i) * afwGeom.degrees)
        s = self.ss2.addNew()
        s.setId(2*nobj + i)
        # Give slight offsets for Coord testing of matches to/from catalog in
        # checkMatchToFromCatalog(). Chosen such that the maximum offset
        # (nobj*1E-7 deg = 0.36 arcsec) is within the maximum distance
        # (1 arcsec) in afwTable.matchRaDec.
        s.set(afwTable.SourceTable.getCoordKey().getRa(), (10 + 0.0010001*i) * afwGeom.degrees)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), (10 + 0.0010001*i) * afwGeom.degrees)

    # Old API (pre DM-855)
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, False)
    self.assertEqual(len(mat), nobj)
    # New API
    mc = afwTable.MatchControl()
    mc.findOnlyClosest = False
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0*afwGeom.arcseconds, mc)
    self.assertEqual(len(mat), nobj)

    cat = afwTable.packMatches(mat)
    mat2 = afwTable.unpackMatches(cat, self.ss1, self.ss2)
    for m1, m2, c in zip(mat, mat2, cat):
        self.assertEqual(m1.first, m2.first)
        self.assertEqual(m1.second, m2.second)
        self.assertEqual(m1.distance, m2.distance)
        self.assertEqual(m1.first.getId(), c["first"])
        self.assertEqual(m1.second.getId(), c["second"])
        self.assertEqual(m1.distance, c["distance"])

    self.checkPickle(mat, checkSlots=False)
    self.checkPickle(mat2, checkSlots=False)
    self.checkMatchToFromCatalog(mat, cat)
    # Removed dead `if False:` block: it used a Python 2 `print` statement,
    # which is a syntax error under Python 3.
def testIdentity(self):
    """Match identical catalogs with old and new matchRaDec APIs and verify
    the pack/unpack round-trip plus pickling.
    """
    nobj = 1000
    for i in range(nobj):
        s = self.ss1.addNew()
        s.setId(i)
        s.set(afwTable.SourceTable.getCoordKey().getRa(), (10 + 0.001 * i) * afwGeom.degrees)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), (10 + 0.001 * i) * afwGeom.degrees)
        s = self.ss2.addNew()
        s.setId(2 * nobj + i)
        s.set(afwTable.SourceTable.getCoordKey().getRa(), (10 + 0.001 * i) * afwGeom.degrees)
        s.set(afwTable.SourceTable.getCoordKey().getDec(), (10 + 0.001 * i) * afwGeom.degrees)

    # Old API (pre DM-855)
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, False)
    self.assertEqual(len(mat), nobj)
    # New API
    mc = afwTable.MatchControl()
    mc.findOnlyClosest = False
    mat = afwTable.matchRaDec(self.ss1, self.ss2, 1.0 * afwGeom.arcseconds, mc)
    self.assertEqual(len(mat), nobj)

    cat = afwTable.packMatches(mat)
    mat2 = afwTable.unpackMatches(cat, self.ss1, self.ss2)
    for m1, m2, c in zip(mat, mat2, cat):
        self.assertEqual(m1.first, m2.first)
        self.assertEqual(m1.second, m2.second)
        self.assertEqual(m1.distance, m2.distance)
        self.assertEqual(m1.first.getId(), c["first"])
        self.assertEqual(m1.second.getId(), c["second"])
        self.assertEqual(m1.distance, c["distance"])

    self.checkPickle(mat, checkSlots=False)
    self.checkPickle(mat2, checkSlots=False)
    # Removed dead `if False:` block: it used a Python 2 `print` statement,
    # which is a syntax error under Python 3.
def writeOutputs(self, dataRef, exposure, background, sourceCat, astromMatches, matchMeta):
    """Write output data to the output repository

    @param[in] dataRef  butler data reference corresponding to a science image
    @param[in] exposure  exposure to write
    @param[in] background  background model for exposure
    @param[in] sourceCat  catalog of measured sources
    @param[in] astromMatches  list of source/refObj matches from the astrometry solver
    @param[in] matchMeta  metadata to attach to the normalized match catalog
    """
    # Parenthesized conditional replaces the original backslash continuation.
    sourceWriteFlags = (0 if self.config.doWriteHeavyFootprintsInSources
                        else afwTable.SOURCE_IO_NO_HEAVY_FOOTPRINTS)
    dataRef.put(sourceCat, "src", flags=sourceWriteFlags)
    if self.config.doWriteMatches and astromMatches is not None:
        normalizedMatches = afwTable.packMatches(astromMatches)
        normalizedMatches.table.setMetadata(matchMeta)
        dataRef.put(normalizedMatches, "srcMatch")
    dataRef.put(exposure, "calexp")
    dataRef.put(background, "calexpBackground")
def testJoinAllFluxes(self):
    """Test that we can read all the fluxes back from an a.n.d catalogue"""
    res = self.getAstrometrySolution()
    normalized = packMatches(res.matches)
    normalized.table.setMetadata(res.matchMeta)
    rejoined = self.astrom.refObjLoader.joinMatchListWithCatalog(normalized, self.srcSet)
    self.assertGreater(len(rejoined), 0)

    # Every band's flux and flux-uncertainty field must be present in the
    # joined reference schema.
    names = rejoined[0][0].getSchema().getNames()
    for band in ("u", "g", "r", "i", "z"):
        self.assertIn("%s_flux" % (band,), names)
        self.assertIn("%s_fluxSigma" % (band,), names)
def testJoinAllFluxes(self):
    """Test that we can read all the fluxes back from an a.n.d catalogue"""
    res = self.getAstrometrySolution()
    matches = res.matches
    matchmeta = res.matchMeta
    normalized = afwTable.packMatches(matches)
    normalized.table.setMetadata(matchmeta)

    matches2 = self.astrom.refObjLoader.joinMatchListWithCatalog(normalized, self.srcSet)
    self.assertGreater(len(matches2), 0)
    ref = matches2[0][0]

    names = ref.getSchema().getNames()
    for b in ("u", "g", "r", "i", "z"):
        # assertIn gives a useful failure message (unlike assertTrue(x in y)).
        self.assertIn("%s_flux" % (b,), names)
        self.assertIn("%s_fluxSigma" % (b,), names)
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch quantum inputs, run the task, and persist the outputs,
    normalizing astrometric matches when configured to write them.
    """
    inputs = butlerQC.get(inputRefs)
    inputs['exposureIdInfo'] = obsBase.ExposureIdInfo.fromDataId(
        butlerQC.quantum.dataId, "visit_detector")

    # The raw reference catalogs are not arguments of run(); drop them.
    if self.config.doAstrometry:
        del inputs['astromRefCat']
    if self.config.doPhotoCal:
        del inputs['photoRefCat']

    outputs = self.run(**inputs)

    if self.config.doWriteMatches and self.config.doAstrometry:
        packed = afwTable.packMatches(outputs.astromMatches)
        # NOTE(review): unlike sibling implementations, no match metadata is
        # attached to the packed catalog, and the "denormalized" output is
        # the raw match list rather than denormalizeMatches(...) — confirm
        # this is intentional.
        if self.config.doWriteMatchesDenormalized:
            outputs.matchesDenormalized = outputs.astromMatches
        outputs.matches = packed

    butlerQC.put(outputs, outputRefs)
def testJoinAllFluxes(self):
    """Test that we can read all the fluxes from a reference catalog"""
    res = self.getAstrometrySolution()
    normalized = packMatches(res.matches)
    normalized.table.setMetadata(res.matchMeta)
    rejoined = self.astrom.refObjLoader.joinMatchListWithCatalog(normalized, self.srcSet)
    self.assertGreater(len(rejoined), 0)

    # Every band's flux and flux-error field must be present in the schema.
    refSchema = rejoined[0][0].getSchema()
    for band in ("u", "g", "r", "i", "z"):
        self.assertIn("%s_flux" % (band,), refSchema)
        self.assertIn("%s_fluxErr" % (band,), refSchema)
def testJoin(self):
    """Normalized matches joined back against the source catalog reproduce
    the original match list exactly (records are not deep-copied).
    """
    res = self.getAstrometrySolution()
    matches = res.matches
    matchmeta = res.matchMeta
    normalized = afwTable.packMatches(matches)
    normalized.table.setMetadata(matchmeta)

    matches2 = self.astrom.refObjLoader.joinMatchListWithCatalog(normalized, self.srcSet)
    self.assertEqual(len(matches2), len(matches))
    # `range` replaces the Python-2-only `xrange` (same behavior here).
    for i in range(len(matches)):
        self.assertEqual(matches2[i].second.table, matches[i].second.table)
        self.assertEqual(matches2[i].second.getId(), matches[i].second.getId())
        # no deep copying, so we can compare ptrs
        self.assertEqual(matches2[i].second, matches[i].second)
        self.assertEqual(matches2[i].first.getId(), matches[i].first.getId())
        self.assertEqual(matches2[i].first.getRa().asDegrees(), matches[i].first.getRa().asDegrees())
        self.assertEqual(matches2[i].first.getDec().asDegrees(), matches[i].first.getDec().asDegrees())
        self.assertEqual(matches2[i].first.get("i_flux"), matches[i].first.get("i_flux"))
def writeOutputs(self, dataRef, exposure, background, sourceCat, astromMatches, matchMeta):
    """Write output data to the output repository

    @param[in] dataRef  butler data reference corresponding to a science image
    @param[in] exposure  exposure to write
    @param[in] background  background model for exposure
    @param[in] sourceCat  catalog of measured sources
    @param[in] astromMatches  list of source/refObj matches from the astrometry solver
    @param[in] matchMeta  metadata to attach to the normalized match catalog
    """
    dataRef.put(sourceCat, "src")
    if self.config.doWriteMatches and astromMatches is not None:
        normalizedMatches = afwTable.packMatches(astromMatches)
        normalizedMatches.table.setMetadata(matchMeta)
        dataRef.put(normalizedMatches, "srcMatch")
        if self.config.doWriteMatchesDenormalized:
            denormMatches = denormalizeMatches(astromMatches, matchMeta)
            dataRef.put(denormMatches, "srcMatchFull")
    dataRef.put(exposure, "calexp")
    dataRef.put(background, "calexpBackground")
def run(self, sensorRef):
    """Process one CCD

    @param sensorRef: sensor-level butler data reference
    @return pipe_base Struct containing these fields:
    - postIsrExposure: exposure after ISR performed if calib.doIsr or config.doCalibrate, else None
    - exposure: calibrated exposure (calexp): as computed if config.doCalibrate,
        else as upersisted and updated if config.doDetection, else None
    - calib: object returned by calibration process if config.doCalibrate, else None
    - sources: detected source if config.doPhotometry, else None
    """
    self.log.info("Processing %s" % (sensorRef.dataId))

    # Initialize the post-ISR exposure.
    postIsrExposure = self.setPostIsrExposure(sensorRef)

    # Initialize outputs.
    idFactory = None
    calib = None
    sources = None
    backgrounds = afwMath.BackgroundList()

    if self.config.doCalibrate:
        idFactory = self.makeIdFactory(sensorRef)
        calib = self.calibrate.run(postIsrExposure, idFactory=idFactory)
        calExposure = calib.exposure
        if self.config.doWriteCalibrate:
            sensorRef.put(calib.sources, self.dataPrefix + "icSrc")
            if calib.matches is not None and self.config.doWriteCalibrateMatches:
                normalizedMatches = afwTable.packMatches(calib.matches)
                normalizedMatches.table.setMetadata(calib.matchMeta)
                sensorRef.put(normalizedMatches, self.dataPrefix + "icMatch")
        try:
            for bg in calib.backgrounds:
                backgrounds.append(bg)
        except TypeError:
            # calib.backgrounds is a single background, not an iterable.
            backgrounds.append(calib.backgrounds)
        except AttributeError:
            self.log.warn("The calibration task did not return any backgrounds. " +
                          "Any background subtracted in the calibration process cannot be persisted.")
    elif sensorRef.datasetExists("calexp"):
        calExposure = sensorRef.get("calexp", immediate=True)
    else:
        raise RuntimeError("No calibrated exposure available for processing")

    # Delegate most of the work to ProcessImageTask.
    result = self.process(sensorRef, calExposure, idFactory=idFactory)

    # Combine the differential background we estimated while detecting the
    # main src catalog with the background estimated in the calibrate step.
    for bg in result.backgrounds:
        backgrounds.append(bg)
    result.backgrounds = backgrounds

    if self.config.doCalibrate and self.config.doWriteCalibrate:
        # Wait until after detection and measurement, since detection sets
        # detected mask bits and both require a background-subtracted exposure.
        if self.config.persistBackgroundModel:
            self.writeBackgrounds(sensorRef, backgrounds)
        else:
            self.restoreBackgrounds(calExposure, backgrounds)
        sensorRef.put(calExposure, self.dataPrefix + "calexp")

    if calib is not None:
        # NOTE(review): `sources` is still None here (it is never reassigned
        # in this method); confirm whether result.sources was intended.
        self.propagateCalibFlags(calib.sources, sources)

    return pipeBase.Struct(
        postIsrExposure=postIsrExposure,
        calib=calib,
        **result.getDict()
    )
def process(self, dataRef, inputExposure):
    """Process an Image

    @param dataRef: data reference that corresponds to the input image
    @param inputExposure: exposure to process
    @return pipe_base Struct containing these fields:
    - postIsrExposure: exposure after ISR performed if calib.doIsr or config.doCalibrate, else None
    - exposure: calibrated exposure (calexp): as computed if config.doCalibrate,
        else as upersisted and updated if config.doDetection, else None
    - calib: object returned by calibration process if config.doCalibrate, else None
    - apCorr: aperture correction: as computed config.doCalibrate, else as unpersisted
        if config.doMeasure, else None
    - sources: detected source if config.doPhotometry, else None
    """
    idFactory = self.makeIdFactory(dataRef)

    # initialize outputs
    calExposure = None
    calib = None
    apCorr = None
    sources = None
    psf = None
    backgrounds = []

    if self.config.doCalibrate:
        calib = self.calibrate.run(inputExposure, idFactory=idFactory)
        psf = calib.psf
        calExposure = calib.exposure
        apCorr = calib.apCorr
        if self.config.doWriteCalibrate:
            dataRef.put(calib.sources, self.dataPrefix + "icSrc")
            if calib.psf is not None:
                dataRef.put(calib.psf, self.dataPrefix + "psf")
            if calib.apCorr is not None:
                dataRef.put(calib.apCorr, self.dataPrefix + "apCorr")
            if calib.matches is not None and self.config.doWriteCalibrateMatches:
                normalizedMatches = afwTable.packMatches(calib.matches)
                normalizedMatches.table.setMetadata(calib.matchMeta)
                dataRef.put(normalizedMatches, self.dataPrefix + "icMatch")
        try:
            for bg in calib.backgrounds:
                backgrounds.append(bg)
        except TypeError:
            # calib.backgrounds is a single background, not an iterable.
            backgrounds.append(calib.backgrounds)
        except AttributeError:
            self.log.warn("The calibration task did not return any backgrounds. "
                          "Any background subtracted in the calibration process cannot be persisted.")
    # Removed redundant `else: calib = None` — calib is already None.

    if self.config.doDetection:
        if calExposure is None:
            if not dataRef.datasetExists(self.dataPrefix + "calexp"):
                raise pipeBase.TaskError("doCalibrate false, doDetection true and calexp does not exist")
            calExposure = dataRef.get(self.dataPrefix + "calexp")
        if calib is None or calib.psf is None:
            psf = dataRef.get(self.dataPrefix + "psf")
            calExposure.setPsf(psf)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        table.setMetadata(self.algMetadata)
        detections = self.detection.makeSourceCatalog(table, calExposure)
        sources = detections.sources
        fpSets = detections.fpSets
        if fpSets.background:
            backgrounds.append(fpSets.background)

    if self.config.doDeblend:
        if calExposure is None:
            calExposure = dataRef.get(self.dataPrefix + 'calexp')
        if psf is None:
            psf = dataRef.get(self.dataPrefix + 'psf')
        self.deblend.run(calExposure, sources, psf)

    if self.config.doMeasurement:
        if apCorr is None:
            apCorr = dataRef.get(self.dataPrefix + "apCorr")
        self.measurement.run(calExposure, sources, apCorr)

    if self.config.doWriteCalibrate:
        # wait until after detection and measurement, since detection sets detected
        # mask bits and both require a background subtracted exposure;
        # note that this overwrites an existing calexp if doCalibrate false
        if calExposure is None:
            self.log.warn("calibrated exposure is None; cannot save it")
        else:
            if self.config.persistBackgroundModel:
                self.log.warn("Persisting background models as an image")
                bg = backgrounds[0].getImageF()
                for b in backgrounds[1:]:
                    bg += b.getImageF()
                dataRef.put(bg, self.dataPrefix + "calexpBackground")
                del bg
            else:
                mi = calExposure.getMaskedImage()
                for bg in backgrounds:
                    mi += bg.getImageF()
            dataRef.put(calExposure, self.dataPrefix + "calexp")

    if calib is not None:
        self.propagateCalibFlags(calib.sources, sources)

    if sources is not None and self.config.doWriteSources:
        if self.config.doWriteHeavyFootprintsInSources:
            sources.setWriteHeavyFootprints(True)
        dataRef.put(sources, self.dataPrefix + 'src')

    if self.config.doWriteSourceMatches:
        # FIX: the original applied "%" to a format string with no placeholder
        # ("..." % (dataRef.dataId)), which raises TypeError at runtime.
        self.log.info("Matching src to reference catalogue for %s" % (dataRef.dataId,))
        srcMatches, srcMatchMeta = self.matchSources(calExposure, sources)
        normalizedSrcMatches = afwTable.packMatches(srcMatches)
        normalizedSrcMatches.table.setMetadata(srcMatchMeta)
        dataRef.put(normalizedSrcMatches, self.dataPrefix + "srcMatch")
    else:
        srcMatches = None
        srcMatchMeta = None

    return pipeBase.Struct(
        inputExposure=inputExposure,
        exposure=calExposure,
        calib=calib,
        apCorr=apCorr,
        sources=sources,
        matches=srcMatches,
        matchMeta=srcMatchMeta,
        backgrounds=backgrounds,
    )
def process(self, dataRef, inputExposure, idFactory=None, backgrounds=None, enableWriteSources=True):
    """Process an Image

    @param dataRef: data reference that corresponds to the input image
    @param inputExposure: exposure to process
    @param idFactory: afw.table.IdFactory to use for source catalog
    @param backgrounds: afwMath.BackgroundList to be appended to
    @param enableWriteSources: if True then writing sources is allowed.
        Set False if you need to postprocess sources before writing them.

    @return pipe_base Struct containing these fields:
    - exposure: input exposure (as modified in the course of processing)
    - sources: detected source if config.doPhotometry, else None
    - matches: matches between detected sources and astrometric reference catalog
    - matchMeta: metadata for matches
    - backgrounds: background list (input as modified)
    """
    if backgrounds is None:
        backgrounds = afwMath.BackgroundList()

    # FIX: `sources` must be bound even when detection is disabled; the code
    # below (deblend/measurement/write and the returned Struct) reads it
    # unconditionally, which previously raised NameError if doDetection
    # was False.
    sources = None
    if self.config.doDetection:
        if idFactory is None:
            idFactory = self.makeIdFactory(dataRef)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        table.setMetadata(self.algMetadata)
        detections = self.detection.run(table, inputExposure)
        sources = detections.sources
        fpSets = detections.fpSets
        if fpSets.background:
            backgrounds.append(fpSets.background)

    if self.config.doDeblend:
        self.deblend.run(inputExposure, sources, inputExposure.getPsf())

    if self.config.doMeasurement:
        self.measurement.run(inputExposure, sources, exposureId=self.getExposureId(dataRef))

    if sources is not None and self.config.doWriteSources:
        sourceWriteFlags = (0 if self.config.doWriteHeavyFootprintsInSources
                            else afwTable.SOURCE_IO_NO_HEAVY_FOOTPRINTS)
        if enableWriteSources:
            dataRef.put(sources, self.dataPrefix + 'src', flags=sourceWriteFlags)

    srcMatches = None
    srcMatchMeta = None
    if self.config.doMeasurement and self.config.doWriteSourceMatches:
        self.log.info("Matching src to reference catalogue")
        try:
            srcMatches, srcMatchMeta = self.matchSources(inputExposure, sources)
            normalizedSrcMatches = afwTable.packMatches(srcMatches)
            normalizedSrcMatches.table.setMetadata(srcMatchMeta)
            dataRef.put(normalizedSrcMatches, self.dataPrefix + "srcMatch")
        except Exception as e:
            # Deliberate best-effort: matching failure must not abort processing.
            self.log.warn("Unable to match to reference catalog: %s" % e)

    return pipeBase.Struct(
        exposure=inputExposure,
        sources=sources,
        matches=srcMatches,
        matchMeta=srcMatchMeta,
        backgrounds=backgrounds,
    )