# These snippets assume the usual LSST stack imports are already in scope,
# e.g. `import numpy as np`, `import lsst.afw.geom as afwGeom`,
# `import lsst.afw.image as afwImage`, `import lsst.daf.persistence as dafPersist`
# and `from lsst.afw.table import (SchemaMapper, SourceCatalog, Field,
#                                  MultiMatch, SimpleRecord, GroupView)`.


def combineWithForce(meas, force):
    """Combine the meas and forced_src catalogs."""
    if len(meas) != len(force):
        raise ValueError("meas and forced_src catalogs should have the same size!")

    mapper = SchemaMapper(meas.schema)
    mapper.addMinimalSchema(meas.schema)
    newSchema = mapper.getOutputSchema()

    # Add new fields for the forced measurements.
    newSchema.addField('force.deblend.nchild', type=int)
    newSchema.addField('force.classification.extendedness', type=float)
    newSchema.addField('force.flux.kron', type=float)
    newSchema.addField('force.flux.kron.err', type=float)
    newSchema.addField('force.flux.psf', type=float)
    newSchema.addField('force.flux.psf.err', type=float)
    newSchema.addField('force.flux.kron.apcorr', type=float)
    newSchema.addField('force.flux.kron.apcorr.err', type=float)
    newSchema.addField('force.flux.psf.apcorr', type=float)
    newSchema.addField('force.flux.psf.apcorr.err', type=float)
    newSchema.addField('force.cmodel.flux', type=float)
    newSchema.addField('force.cmodel.flux.err', type=float)
    newSchema.addField('force.cmodel.fracDev', type=float)
    newSchema.addField('force.cmodel.exp.flux', type=float)
    newSchema.addField('force.cmodel.exp.flux.err', type=float)
    newSchema.addField('force.cmodel.dev.flux', type=float)
    newSchema.addField('force.cmodel.dev.flux.err', type=float)
    newSchema.addField('force.cmodel.flux.apcorr', type=float)
    newSchema.addField('force.cmodel.flux.apcorr.err', type=float)
    newSchema.addField('force.cmodel.exp.flux.apcorr', type=float)
    newSchema.addField('force.cmodel.exp.flux.apcorr.err', type=float)
    newSchema.addField('force.cmodel.dev.flux.apcorr', type=float)
    newSchema.addField('force.cmodel.dev.flux.apcorr.err', type=float)

    # Columns to copy from the forced catalog into the 'force.*' fields.
    newCols = ['deblend.nchild', 'classification.extendedness',
               'flux.kron', 'flux.kron.err',
               'flux.psf', 'flux.psf.err',
               'flux.kron.apcorr', 'flux.kron.apcorr.err',
               'flux.psf.apcorr', 'flux.psf.apcorr.err',
               'cmodel.flux', 'cmodel.flux.err',
               'cmodel.flux.apcorr', 'cmodel.flux.apcorr.err',
               'cmodel.exp.flux', 'cmodel.exp.flux.err',
               'cmodel.exp.flux.apcorr', 'cmodel.exp.flux.apcorr.err',
               'cmodel.dev.flux', 'cmodel.dev.flux.err',
               'cmodel.dev.flux.apcorr', 'cmodel.dev.flux.apcorr.err',
               'cmodel.fracDev']

    combSrc = SourceCatalog(newSchema)
    combSrc.extend(meas, mapper=mapper)

    for key in newCols:
        combSrc['force.' + key][:] = force[key][:]

    # Re-define the centroid and shape slots on the combined table.
    for name in ("Centroid", "Shape"):
        val = getattr(meas.table, "get" + name + "Key")()
        err = getattr(meas.table, "get" + name + "ErrKey")()
        flag = getattr(meas.table, "get" + name + "FlagKey")()
        getattr(combSrc.table, "define" + name)(val, err, flag)

    return combSrc
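
# Hedged usage sketch (not from the original source): read the per-patch
# measurement and forced-photometry catalogs through a Gen2 butler and merge
# them with combineWithForce.  The repository path, the data ID, and the
# 'deepCoadd_meas' / 'deepCoadd_forced_src' dataset names are assumptions for
# illustration only.
import lsst.daf.persistence as dafPersist

def combinePatch(repo, dataId):
    butler = dafPersist.Butler(repo)
    meas = butler.get('deepCoadd_meas', dataId, immediate=True)
    force = butler.get('deepCoadd_forced_src', dataId, immediate=True)
    combined = combineWithForce(meas, force)
    # Persist the merged catalog next to the inputs (file name is illustrative).
    combined.writeFits('combined-%(tract)d-%(patch)s.fits' % dataId)
    return combined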
def run(self, source_catalogs, vIds):
    # Concatenate catalogs
    schema = source_catalogs[0].schema
    size = sum(len(cat) for cat in source_catalogs)
    source_catalog = SourceCatalog(schema)
    source_catalog.reserve(size)
    for cat in source_catalogs:
        source_catalog.extend(cat)
    return self.measure.run(source_catalog, self.config.connections.metric, vIds)
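
# Hedged sketch (not from the original source): the same reserve/extend
# concatenation pattern as a free function.  Only the SourceCatalog calls used
# above are relied on; the schema-equality guard is an illustrative safeguard.
from lsst.afw.table import SourceCatalog

def concatenate_catalogs(source_catalogs):
    """Concatenate afw SourceCatalogs that share a single schema."""
    schema = source_catalogs[0].schema
    if any(cat.schema != schema for cat in source_catalogs[1:]):
        raise ValueError("All input catalogs must share the same schema")
    combined = SourceCatalog(schema)
    # Pre-allocate room for every record before appending.
    combined.reserve(sum(len(cat) for cat in source_catalogs))
    for cat in source_catalogs:
        combined.extend(cat)
    return combined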
def loadAndMatchData(repo, visitDataIds,
                     matchRadius=afwGeom.Angle(1, afwGeom.arcseconds),
                     verbose=False):
    """Load data from specific visits and match against the reference.

    Parameters
    ----------
    repo : string
        The repository.  This is generally the directory on disk
        that contains the repository and mapper.
    visitDataIds : list of dict
        List of `butler` data IDs of Image catalogs to compare to reference.
        The `calexp` pixel image is needed for the photometric calibration.
    matchRadius : afwGeom.Angle, optional
        Radius for matching.
    verbose : bool, optional
        Output additional information on the analysis steps.

    Returns
    -------
    afw.table.GroupView
        An object of matched catalog.
    """
    # Following
    # https://github.com/lsst/afw/blob/tickets/DM-3896/examples/repeatability.ipynb
    butler = dafPersist.Butler(repo)
    dataset = 'src'

    # 2016-02-08 MWV:
    # I feel like I could be doing something more efficient with
    # something along the lines of the following:
    #   dataRefs = [dafPersist.ButlerDataRef(butler, vId) for vId in visitDataIds]

    ccdKeyName = getCcdKeyName(visitDataIds[0])

    schema = butler.get(dataset + "_schema", immediate=True).schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    mapper.addOutputField(Field[float]('base_PsfFlux_snr', "PSF flux SNR"))
    mapper.addOutputField(Field[float]('base_PsfFlux_mag', "PSF magnitude"))
    mapper.addOutputField(Field[float]('base_PsfFlux_magerr',
                                       "PSF magnitude uncertainty"))
    newSchema = mapper.getOutputSchema()

    # Create an object that can match multiple catalogs with the same schema
    mmatch = MultiMatch(newSchema,
                        dataIdFormat={'visit': int, ccdKeyName: int},
                        radius=matchRadius,
                        RecordClass=SimpleRecord)

    # create the new extended source catalog
    srcVis = SourceCatalog(newSchema)

    for vId in visitDataIds:
        try:
            calexpMetadata = butler.get("calexp_md", vId, immediate=True)
        except FitsError as fe:
            print(fe)
            print("Could not open calibrated image file for ", vId)
            print("Skipping %s " % repr(vId))
            continue
        except TypeError as te:
            # DECam images that haven't been properly reformatted
            # can trigger a TypeError because of a residual FITS header
            # LTV2 which is a float instead of the expected integer.
            # This generates an error of the form:
            #
            #   lsst::pex::exceptions::TypeError: 'LTV2 has mismatched type'
            #
            # See, e.g., DM-2957 for details.
            print(te)
            print("Calibration image header information malformed.")
            print("Skipping %s " % repr(vId))
            continue

        calib = afwImage.Calib(calexpMetadata)

        oldSrc = butler.get('src', vId, immediate=True)
        print(len(oldSrc), "sources in ccd %s visit %s" %
              (vId[ccdKeyName], vId["visit"]))

        # create temporary catalog
        tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
        tmpCat.extend(oldSrc, mapper=mapper)
        tmpCat['base_PsfFlux_snr'][:] = (tmpCat['base_PsfFlux_flux']
                                         / tmpCat['base_PsfFlux_fluxSigma'])
        with afwImageUtils.CalibNoThrow():
            (tmpCat['base_PsfFlux_mag'][:],
             tmpCat['base_PsfFlux_magerr'][:]) = \
                calib.getMagnitude(tmpCat['base_PsfFlux_flux'],
                                   tmpCat['base_PsfFlux_fluxSigma'])

        srcVis.extend(tmpCat, False)
        mmatch.add(catalog=tmpCat, dataId=vId)

    # Complete the match, returning a catalog that includes
    # all matched sources with object IDs that can be used to group them.
    matchCat = mmatch.finish()

    # Create a mapping object that allows the matches to be manipulated
    # as a mapping of object ID to catalog of sources.
    allMatches = GroupView.build(matchCat)

    return allMatches
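
# Hedged usage sketch (not from the original source): run the matcher over a
# couple of overlapping visits and look at the per-object scatter of the PSF
# magnitudes.  The repository path and data IDs are placeholders; the
# 'base_PsfFlux_mag' field is the one added by the mapper above.
import numpy as np

visitDataIds = [{'visit': 849375, 'ccd': 12},
                {'visit': 850587, 'ccd': 12}]  # placeholder data IDs
allMatches = loadAndMatchData('/path/to/repo', visitDataIds)

# Keep objects seen in at least two visits, then aggregate group-by-group.
goodMatches = allMatches.where(lambda cat: len(cat) >= 2)
psfMagKey = goodMatches.schema.find('base_PsfFlux_mag').key
magStd = goodMatches.apply(np.std, field=psfMagKey)
print('Median per-object PSF-mag RMS: %.4f mag' % np.median(magStd))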
def match_catalogs(inputs, photoCalibs, astromCalibs, vIds, matchRadius,
                   apply_external_wcs=False, logger=None):
    schema = inputs[0].schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    mapper.addOutputField(Field[float]('base_PsfFlux_snr', 'PSF flux SNR'))
    mapper.addOutputField(Field[float]('base_PsfFlux_mag', 'PSF magnitude'))
    mapper.addOutputField(Field[float]('base_PsfFlux_magErr',
                                       'PSF magnitude uncertainty'))
    # Needed because addOutputField(... 'slot_ModelFlux_mag') will add a field
    # with that literal name.
    aliasMap = schema.getAliasMap()
    # Possibly not needed since base_GaussianFlux is the default, but this
    # ought to be safe.
    modelName = (aliasMap['slot_ModelFlux'] if 'slot_ModelFlux' in aliasMap.keys()
                 else 'base_GaussianFlux')
    mapper.addOutputField(Field[float](f'{modelName}_mag', 'Model magnitude'))
    mapper.addOutputField(Field[float](f'{modelName}_magErr',
                                       'Model magnitude uncertainty'))
    mapper.addOutputField(Field[float](f'{modelName}_snr', 'Model flux SNR'))
    mapper.addOutputField(Field[float]('e1', 'Source Ellipticity 1'))
    mapper.addOutputField(Field[float]('e2', 'Source Ellipticity 2'))
    mapper.addOutputField(Field[float]('psf_e1', 'PSF Ellipticity 1'))
    mapper.addOutputField(Field[float]('psf_e2', 'PSF Ellipticity 2'))
    mapper.addOutputField(Field[np.int32]('filt', 'filter code'))
    newSchema = mapper.getOutputSchema()
    newSchema.setAliasMap(schema.getAliasMap())

    # Create an object that matches multiple catalogs with the same schema
    mmatch = MultiMatch(newSchema,
                        dataIdFormat={'visit': np.int32, 'detector': np.int32},
                        radius=matchRadius,
                        RecordClass=SimpleRecord)

    # create the new extended source catalog
    srcVis = SourceCatalog(newSchema)

    filter_dict = {'u': 1, 'g': 2, 'r': 3, 'i': 4, 'z': 5, 'y': 6,
                   'HSC-U': 1, 'HSC-G': 2, 'HSC-R': 3, 'HSC-I': 4,
                   'HSC-Z': 5, 'HSC-Y': 6}

    # Sort by visit, detector, then filter
    vislist = [v['visit'] for v in vIds]
    ccdlist = [v['detector'] for v in vIds]
    filtlist = [v['band'] for v in vIds]
    tab_vids = Table([vislist, ccdlist, filtlist], names=['vis', 'ccd', 'filt'])
    sortinds = np.argsort(tab_vids, order=('vis', 'ccd', 'filt'))

    for ind in sortinds:
        oldSrc = inputs[ind]
        photoCalib = photoCalibs[ind]
        wcs = astromCalibs[ind]
        vId = vIds[ind]
        if logger:
            logger.debug(f"{len(oldSrc)} sources in ccd {vId['detector']} "
                         f"visit {vId['visit']}")

        # create temporary catalog
        tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
        tmpCat.extend(oldSrc, mapper=mapper)

        filtnum = filter_dict[vId['band']]
        tmpCat['filt'] = np.repeat(filtnum, len(oldSrc))

        tmpCat['base_PsfFlux_snr'][:] = (tmpCat['base_PsfFlux_instFlux']
                                         / tmpCat['base_PsfFlux_instFluxErr'])

        if apply_external_wcs and wcs is not None:
            updateSourceCoords(wcs, tmpCat)

        photoCalib.instFluxToMagnitude(tmpCat, "base_PsfFlux", "base_PsfFlux")
        tmpCat['slot_ModelFlux_snr'][:] = (tmpCat['slot_ModelFlux_instFlux']
                                           / tmpCat['slot_ModelFlux_instFluxErr'])
        photoCalib.instFluxToMagnitude(tmpCat, "slot_ModelFlux", "slot_ModelFlux")

        _, psf_e1, psf_e2 = ellipticity_from_cat(oldSrc, slot_shape='slot_PsfShape')
        _, star_e1, star_e2 = ellipticity_from_cat(oldSrc, slot_shape='slot_Shape')
        tmpCat['e1'][:] = star_e1
        tmpCat['e2'][:] = star_e2
        tmpCat['psf_e1'][:] = psf_e1
        tmpCat['psf_e2'][:] = psf_e2

        srcVis.extend(tmpCat, False)
        mmatch.add(catalog=tmpCat, dataId=vId)

    # Complete the match, returning a catalog that includes
    # all matched sources with object IDs that can be used to group them.
    matchCat = mmatch.finish()

    # Create a mapping object that allows the matches to be manipulated
    # as a mapping of object ID to catalog of sources.
    # I don't think I can persist a group view, so this may need to be
    # called in a subsequent task:
    #   allMatches = GroupView.build(matchCat)

    return srcVis, matchCat
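
# Hedged sketch (not from the original source) of the follow-up step hinted at
# in the comment above: rebuild the per-object grouping from the persisted
# match catalog with GroupView.build.  The `minMatches` cut is illustrative.
from lsst.afw.table import GroupView

def group_matches(matchCat, minMatches=2):
    """Group matched sources by object ID, keeping well-matched objects."""
    allMatches = GroupView.build(matchCat)
    # Keep only objects detected in at least `minMatches` input catalogs.
    return allMatches.where(lambda cat: len(cat) >= minMatches)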
def run(self, images, ref, replacers, imageId):
    """Process coadds from all bands for a single patch.

    This method should not add or modify self.

    So far all children are using this exact code so leaving it here
    for now.  If we specialize a lot, might make a processor its own
    object.

    Parameters
    ----------
    images : `dict` of `lsst.afw.image.ExposureF`
        Coadd images and associated metadata, keyed by filter name.
    ref : `lsst.afw.table.SourceCatalog`
        A catalog with one record for each object, containing "best"
        measurements across all bands.
    replacers : `dict` of `lsst.meas.base.NoiseReplacer`, optional
        A dictionary of `~lsst.meas.base.NoiseReplacer` objects that can
        be used to insert and remove deblended pixels for each object.
        When not `None`, all detected pixels in ``images`` will have
        *already* been replaced with noise, and this *must* be used
        to restore objects one at a time.
    imageId : `int`
        Unique ID for this unit of data.  Should be used (possibly
        indirectly) to seed random numbers.

    Returns
    -------
    results : `lsst.pipe.base.Struct`
        Struct with (at least) an `output` attribute that is a catalog
        to be written as ``self.config.output``.
    """
    tm0 = time.time()
    nproc = 0

    self.set_rng(imageId)

    config = self.cdict
    self.log.info(pprint.pformat(config))

    extractor = self._get_extractor(images)

    # Make an empty catalog
    output = SourceCatalog(self.schema)

    # Add mostly-empty rows to it, copying IDs from the ref catalog.
    output.extend(ref, mapper=self.mapper)

    index_range = self.get_index_range(output)

    for n, (outRecord, refRecord) in enumerate(zip(output, ref)):
        if n < index_range[0] or n > index_range[1]:
            continue

        self.log.info('index: %06d/%06d' % (n, index_range[1]))
        nproc += 1

        outRecord.setFootprint(None)  # copied from ref; don't need to write these again

        # Insert the deblended pixels for just this object into all images.
        if replacers is not None:
            for r in replacers.values():
                r.insertSource(refRecord.getId())

        mbobs = extractor.get_mbobs(refRecord)

        res = self._process_observations(ref['id'][n], mbobs)

        self._copy_result(mbobs, res, outRecord)

        # Remove the deblended pixels for this object so we can process the next one.
        if replacers is not None:
            for r in replacers.values():
                r.removeSource(refRecord.getId())

    # Restore all original pixels in the images.
    if replacers is not None:
        for r in replacers.values():
            r.end()

    tm = time.time() - tm0
    self.log.info('time: %g min' % (tm/60.0))
    self.log.info('time per: %g sec' % (tm/nproc))

    return Struct(output=output)
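
# Hedged sketch (not from the original source) of the NoiseReplacer contract
# described in the docstring above: each object is restored into the
# noise-filled images, measured, and then re-replaced, with try/finally making
# the cleanup robust against per-object failures.  `measure_one` is a
# hypothetical per-object callback; only insertSource/removeSource/end from
# the method above are relied on.
def process_with_replacers(ref, replacers, measure_one):
    try:
        for refRecord in ref:
            # Restore this object's deblended pixels into the noise images.
            for r in replacers.values():
                r.insertSource(refRecord.getId())
            try:
                measure_one(refRecord)
            finally:
                # Always re-replace the pixels, even if measurement fails.
                for r in replacers.values():
                    r.removeSource(refRecord.getId())
    finally:
        # Put every original pixel back once all objects are done.
        for r in replacers.values():
            r.end()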