def testMockCalibrateTask(self):
    task = MockCalibrateTask()
    pipelineTests.assertValidInitOutput(task)
    # Even the real CalibrateTask won't pass assertValidOutput, because for
    # some reason the outputs are injected in runQuantum rather than run.

    self.butler.put(afwImage.ExposureF(), "icExp", self.visitId)
    self.butler.put(afwMath.BackgroundList(), "icExpBackground", self.visitId)
    self.butler.put(afwTable.SourceCatalog(), "icSrc", self.visitId)
    self.butler.put(afwTable.SimpleCatalog(), "gaia_dr2_20200414", self.htmId)
    self.butler.put(afwTable.SimpleCatalog(), "ps1_pv3_3pi_20170110", self.htmId)

    quantum = pipelineTests.makeQuantum(
        task, self.butler, self.visitId,
        {
            "exposure": self.visitId,
            "background": self.visitId,
            "icSourceCat": self.visitId,
            "astromRefCat": [self.htmId],
            "photoRefCat": [self.htmId],
            "outputExposure": self.visitId,
            "outputCat": self.visitId,
            "outputBackground": self.visitId,
            "matches": self.visitId,
            "matchesDenormalized": self.visitId,
        })
    pipelineTests.runTestQuantum(task, self.butler, quantum, mockRun=False)
def readAlexieMASKED_reg(fileName="/Users/rhl/Dropbox/Robert/MASKED.reg"):
    """Read Alexie's COSMOS table, converting it to a minimal form that can be
    used with afwTable.matchRaDec"""
    with open(fileName, "r") as fd:
        while True:
            line = fd.readline()
            if not line:
                break

            mat = re.search(
                r"fk5;circle\(\s*(\d+\.\d+),\s*(\d+\.\d+)\s*,\s*(\d+\.\d+)\"\)# color=\w+\s*$",
                line)
            if mat:
                ra, dec, rad = [float(_) for _ in mat.groups()]
                print(ra, dec, rad)
                return

    # NOTE: `data` is not defined in this function; the block below mirrors
    # readAlexieFITS() and expects a table with the same columns.
    schema = afwTable.SimpleTable.makeMinimalSchema()
    schema.addField(afwTable.Field["I"]("mu_class",
                                        "S/G classification. 1: galaxy; 2: star; 3: other"))
    schema.addField(afwTable.Field["F"]("mag_auto", "SExtractor's mag_auto"))

    cat = afwTable.SimpleCatalog(schema)

    n = len(data)
    # There has to be a better way!  https://jira.lsstcorp.org/browse/DM-347
    cat.reserve(n)
    for i in range(n):
        cat.addNew()

    cat["id"][:] = data["IDENT"]
    cat["coord.ra"][:] = data["ALPHA_J2000"]
    cat["coord.dec"][:] = data["DELTA_J2000"]
    cat["mu_class"][:] = data["MU_CLASS"]
    cat["mag_auto"][:] = data["MAG_AUTO"]

    cat.writeFits("cosmos_sg.fits")
def readAlexieFITS(fileName="/Users/rhl/Dropbox/Robert/cosmos_forhsc_feb20_2012.fits"):
    """Read Alexie's COSMOS table, converting it to a minimal form that can be
    used with afwTable.matchRaDec"""
    import pyfits

    tbl = pyfits.open(fileName)
    data = tbl[1].data

    schema = afwTable.SimpleTable.makeMinimalSchema()
    schema.addField(afwTable.Field["I"]("mu_class",
                                        "S/G classification. 1: galaxy; 2: star; 3: other"))
    schema.addField(afwTable.Field["F"]("mag_auto", "SExtractor's mag_auto"))

    cat = afwTable.SimpleCatalog(schema)

    n = len(data)
    # There has to be a better way!  https://jira.lsstcorp.org/browse/DM-347
    cat.reserve(n)
    for i in range(n):
        cat.addNew()

    cat["id"][:] = data["IDENT"]
    cat["coord.ra"][:] = data["ALPHA_J2000"]
    cat["coord.dec"][:] = data["DELTA_J2000"]
    cat["mu_class"][:] = data["MU_CLASS"]
    cat["mag_auto"][:] = data["MAG_AUTO"]

    cat.writeFits("cosmos_sg.fits")
def loadData(self, rangePix=3000, numPoints=25):
    """Load catalogs and make the match list

    This is a separate function so data can be reloaded if fitting more than once
    (each time a WCS is fit it may update the source catalog, reference catalog
    and match list)
    """
    refSchema = LoadReferenceObjectsTask.makeMinimalSchema(
        filterNameList=["r"], addIsPhotometric=True, addCentroid=True)
    self.refCat = afwTable.SimpleCatalog(refSchema)
    srcSchema = afwTable.SourceTable.makeMinimalSchema()
    SingleFrameMeasurementTask(schema=srcSchema)
    self.srcCoordKey = afwTable.CoordKey(srcSchema["coord"])
    self.srcCentroidKey = afwTable.Point2DKey(srcSchema["slot_Centroid"])
    self.srcCentroidKey_xErr = srcSchema["slot_Centroid_xErr"].asKey()
    self.srcCentroidKey_yErr = srcSchema["slot_Centroid_yErr"].asKey()
    self.sourceCat = afwTable.SourceCatalog(srcSchema)

    self.matches = []
    for i in np.linspace(0., rangePix, numPoints):
        for j in np.linspace(0., rangePix, numPoints):
            src = self.sourceCat.addNew()
            refObj = self.refCat.addNew()

            src.set(self.srcCentroidKey, lsst.geom.Point2D(i, j))
            src.set(self.srcCentroidKey_xErr, 0.1)
            src.set(self.srcCentroidKey_yErr, 0.1)

            c = self.tanWcs.pixelToSky(i, j)
            refObj.setCoord(c)

            self.matches.append(self.MatchClass(refObj, src, 0.0))
def _outputStandardStars(self, butler, offsets):
    """
    Output standard stars in indexed reference catalog format.

    Parameters
    ----------
    butler : `lsst.daf.persistence.Butler`
    offsets : `np.array` of floats
        Per band zeropoint offsets
    """
    self.log.info("Outputting standard stars to %s" %
                  (self.config.datasetConfig.ref_dataset_name))

    # Load the stars (this is the full set of stars, no cuts)
    stars = butler.get('fgcmStandardStars', fgcmcycle=self.useCycle)

    # We determine the conversion from the native units (typically radians) to
    # degrees for the first star.  This allows us to treat coord_ra/coord_dec as
    # numpy arrays rather than Angles, which would be approximately 600x slower.
    # TODO: Fix this after DM-16524 (HtmIndexer.indexPoints should take coords
    # (as Angles) for input)
    conv = stars[0]['coord_ra'].asDegrees() / float(stars[0]['coord_ra'])
    indices = np.array(self.indexer.indexPoints(stars['coord_ra'] * conv,
                                                stars['coord_dec'] * conv))

    formattedCat = self._formatCatalog(stars, offsets)

    # Write the master schema
    dataId = self.indexer.makeDataId('master_schema',
                                     self.config.datasetConfig.ref_dataset_name)
    butler.put(afwTable.SimpleCatalog(formattedCat.schema), 'ref_cat', dataId=dataId)

    # Break up the pixels using a histogram
    h, rev = esutil.stat.histogram(indices, rev=True)

    gd, = np.where(h > 0)
    selected = np.zeros(len(formattedCat), dtype=bool)
    for i in gd:
        i1a = rev[rev[i]: rev[i + 1]]

        # the formattedCat afwTable can only be indexed with boolean arrays,
        # and not numpy index arrays (see DM-16497).  This little trick
        # converts the index array into a boolean array
        selected[:] = False
        selected[i1a] = True

        # Write the individual pixel
        dataId = self.indexer.makeDataId(indices[i1a[0]],
                                         self.config.datasetConfig.ref_dataset_name)
        butler.put(formattedCat[selected], 'ref_cat', dataId=dataId)

    # And save the dataset configuration
    dataId = self.indexer.makeDataId(None,
                                     self.config.datasetConfig.ref_dataset_name)
    butler.put(self.config.datasetConfig, 'ref_cat_config', dataId=dataId)

    self.log.info("Done outputting standard stars.")
def setUp(self):
    self.crval = afwCoord.IcrsCoord(afwGeom.PointD(44., 45.))
    self.crpix = afwGeom.Point2D(15000, 4000)
    arcsecPerPixel = 1 / 3600.0
    CD11 = arcsecPerPixel
    CD12 = 0
    CD21 = 0
    CD22 = arcsecPerPixel

    self.wcs = afwImage.makeWcs(self.crval, self.crpix, CD11, CD12, CD21, CD22)

    refSchema = afwTable.SimpleTable.makeMinimalSchema()
    self.refCentroidKey = afwTable.Point2DKey.addFields(
        refSchema, "centroid", "centroid", "pixels")
    self.refCoordKey = afwTable.CoordKey(refSchema["coord"])
    self.refCat = afwTable.SimpleCatalog(refSchema)

    # an alias is required to make src.getCentroid() work;
    # simply defining a field named "slot_Centroid" doesn't suffice
    srcSchema = afwTable.SourceTable.makeMinimalSchema()
    self.srcCentroidKey = afwTable.Point2DKey.addFields(
        srcSchema, "base_SdssCentroid", "centroid", "pixels")
    srcAliases = srcSchema.getAliasMap()
    srcAliases.set("slot_Centroid", "base_SdssCentroid")
    self.srcCoordKey = afwTable.CoordKey(srcSchema["coord"])
    self.sourceCat = afwTable.SourceCatalog(srcSchema)
def loadAndMatch(self, exposure, sourceCat):
    """!Fake loading and matching

    Copy the source catalog to a reference catalog and produce a match list
    """
    wcs = exposure.getWcs()
    refSchema = LoadReferenceObjectsTask.makeMinimalSchema(
        filterNameList=[FilterName],
        addIsPhotometric=True,
    )
    refCat = afwTable.SimpleCatalog(refSchema)
    refFluxKey = refSchema[FilterName + "_flux"].asKey()
    refIsPhotoKey = refSchema["photometric"].asKey()

    matches = lsst.afw.table.ReferenceMatchVector()
    for src in sourceCat:
        flux = 1e-3 * src.getPsfFlux() * np.random.normal(1.0, 2e-2)
        refObj = refCat.addNew()
        refObj.set(refFluxKey, flux)
        refObj.setCoord(wcs.pixelToSky(src.getCentroid()))
        refObj.set(refIsPhotoKey, True)
        match = lsst.afw.table.ReferenceMatch(refObj, src, 0)
        matches.append(match)

    return pipeBase.Struct(
        refCat=refCat,
        matches=matches,
        matchMeta=createMatchMetadata(exposure),
    )
def makeRefCatalog(self):
    schema = LoadReferenceObjectsTask.makeMinimalSchema(
        filterNameList=["g", "r"], addIsPhotometric=True, addIsResolved=True)
    catalog = afwTable.SimpleCatalog(schema)
    return catalog
def _filterRefCat(self, refCat, refFluxField):
    """Sub-select a number of reference objects starting from the brightest
    and maxing out at the number specified by maxRefObjects in the config.

    No trimming is done if len(refCat) <= config.maxRefObjects.

    Parameters
    ----------
    refCat : `lsst.afw.table.SimpleCatalog`
        Catalog of reference objects to trim.
    refFluxField : `str`
        field of refCat to use for flux

    Returns
    -------
    outCat : `lsst.afw.table.SimpleCatalog`
        Catalog trimmed to the number set in the task config from the
        brightest flux down.
    """
    # Find the flux cut that gives us the desired number of objects.
    if len(refCat) <= self.config.maxRefObjects:
        return refCat
    fluxArray = refCat.get(refFluxField)
    sortedFluxArray = fluxArray[fluxArray.argsort()]
    minFlux = sortedFluxArray[-(self.config.maxRefObjects + 1)]

    selected = (refCat.get(refFluxField) > minFlux)

    outCat = afwTable.SimpleCatalog(refCat.schema)
    outCat.reserve(self.config.maxRefObjects)
    outCat.extend(refCat[selected])

    return outCat
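# A minimal, illustrative sketch of the same brightest-N trim pattern used by
# _filterRefCat above, written as a standalone helper.  It relies only on APIs
# already shown in these snippets (np, afwTable, LoadReferenceObjectsTask);
# the function name, the maxRefObjects value, and the random fluxes are
# assumptions made purely for illustration.
def _exampleBrightestTrim(maxRefObjects=100, nObjects=1000):
    schema = LoadReferenceObjectsTask.makeMinimalSchema(filterNameList=["r"])
    refCat = afwTable.SimpleCatalog(schema)
    # reserve + addNew keeps the catalog contiguous so column assignment works
    refCat.reserve(nObjects)
    for _ in range(nObjects):
        refCat.addNew()
    refCat["r_flux"][:] = np.random.lognormal(size=len(refCat))

    if len(refCat) <= maxRefObjects:
        return refCat
    # flux of the (maxRefObjects+1)-th brightest object sets the cut
    fluxArray = refCat.get("r_flux")
    minFlux = np.sort(fluxArray)[-(maxRefObjects + 1)]
    trimmed = afwTable.SimpleCatalog(refCat.schema)
    trimmed.extend(refCat[fluxArray > minFlux])  # boolean-array selection
    return trimmed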
def getCatalog(self, pixelId, schema, nNewElements):
    """Get a catalog from disk or create it if it doesn't exist.

    Parameters
    ----------
    pixelId : `dict`
        Identifier for catalog to retrieve
    schema : `lsst.afw.table.Schema`
        Schema to use in catalog creation if it does not exist.
    nNewElements : `int`
        The number of new elements that will be added to the catalog,
        so space can be preallocated.

    Returns
    -------
    catalog : `lsst.afw.table.SimpleCatalog`
        The new or read-and-resized catalog specified by `pixelId`.
    """
    # This is safe, because we lock on this file before getCatalog is called.
    if os.path.isfile(self.filenames[pixelId]):
        catalog = afwTable.SimpleCatalog.readFits(self.filenames[pixelId])
        catalog.resize(len(catalog) + nNewElements)
        return catalog.copy(deep=True)  # ensure contiguity, so that column-assignment works
    catalog = afwTable.SimpleCatalog(schema)
    catalog.resize(nNewElements)
    self.addRefCatMetadata(catalog)
    return catalog
def setUp(self):
    self.crval = afwGeom.SpherePoint(44.0, 45.0, afwGeom.degrees)
    self.crpix = afwGeom.Point2D(15000, 4000)
    arcsecPerPixel = 1 / 3600.0
    cdMatrix = afwGeom.makeCdMatrix(arcsecPerPixel * afwGeom.arcseconds)

    self.wcs = afwGeom.makeSkyWcs(crval=self.crval, crpix=self.crpix, cdMatrix=cdMatrix)

    refSchema = afwTable.SimpleTable.makeMinimalSchema()
    self.refCentroidKey = afwTable.Point2DKey.addFields(
        refSchema, "centroid", "centroid", "pixels")
    self.refCoordKey = afwTable.CoordKey(refSchema["coord"])
    self.refHasCentroidKey = refSchema.addField("hasCentroid", type="Flag")
    self.refCat = afwTable.SimpleCatalog(refSchema)

    # an alias is required to make src.getCentroid() work;
    # simply defining a field named "slot_Centroid" doesn't suffice
    srcSchema = afwTable.SourceTable.makeMinimalSchema()
    self.srcCentroidKey = afwTable.Point2DKey.addFields(
        srcSchema, "base_SdssCentroid", "centroid", "pixels")
    srcAliases = srcSchema.getAliasMap()
    srcAliases.set("slot_Centroid", "base_SdssCentroid")
    self.srcCoordKey = afwTable.CoordKey(srcSchema["coord"])
    self.sourceCat = afwTable.SourceCatalog(srcSchema)
def testFilterAliasMap(self):
    """Make a schema with filter aliases."""
    for defaultFilter in ("", "r", "camr"):
        for filterMap in ({}, {"camr": "r"}):
            config = TrivialLoader.ConfigClass()
            config.defaultFilter = defaultFilter
            config.filterMap = filterMap
            loader = TrivialLoader(config=config)
            refSchema = TrivialLoader.makeMinimalSchema(filterNameList="r")
            try:
                loader._addFluxAliases(refSchema)
                self.assertNotEqual(defaultFilter, "camr")
            except Exception:
                # only reference filters are allowed as default filters
                self.assertEqual(defaultFilter, "camr")
                continue

            self.assertIn("r_flux", refSchema)
            self.assertIn("r_fluxErr", refSchema)

            # camera filters aliases are named <filter>_camFlux
            if "camr" in filterMap:
                self.assertEqual(getRefFluxField(refSchema, "camr"), "camr_camFlux")
            else:
                with self.assertRaises(RuntimeError):
                    getRefFluxField(refSchema, "camr")

            # if a non-empty default filter is specified then camFlux
            # and camFluxErr should be present
            hasDefault = bool(defaultFilter)
            self.assertEqual("camFlux" in refSchema, hasDefault)
            self.assertEqual("camFluxErr" in refSchema, hasDefault)

            refCat = afwTable.SimpleCatalog(refSchema)
            refObj = refCat.addNew()
            refObj["r_flux"] = 1.23
            self.assertAlmostEqual(refCat[0].get(getRefFluxField(refSchema, "r")), 1.23)
            if "camr" in filterMap:
                self.assertAlmostEqual(refCat[0].get(getRefFluxField(refSchema, "camr")), 1.23)
            if hasDefault:
                self.assertEqual(getRefFluxField(refSchema, ""), "camFlux")
                self.assertAlmostEqual(refCat[0].get(getRefFluxField(refSchema, "")), 1.23)

            refObj["r_fluxErr"] = 0.111
            if "camr" in filterMap:
                self.assertEqual(refCat[0].get("camr_camFluxErr"), 0.111)

            fluxKey, fluxErrKey = getRefFluxKeys(refSchema, "r")
            self.assertEqual(refCat[0].get(fluxKey), 1.23)
            self.assertEqual(refCat[0].get(fluxErrKey), 0.111)

            if "camr" in filterMap:
                fluxKey, fluxErrKey = getRefFluxKeys(refSchema, "camr")
                self.assertEqual(refCat[0].get(fluxErrKey), 0.111)
            else:
                with self.assertRaises(RuntimeError):
                    getRefFluxKeys(refSchema, "camr")
def testTicket2080(self):
    packed = afwTable.packMatches(self.matches)
    cat1 = self.cat1.copy()
    cat2 = afwTable.SimpleCatalog(self.schema)
    cat1.sort()
    cat2.sort()
    # just test that the next line doesn't segv
    afwTable.unpackMatches(packed, cat1, cat2)
def _formatCatalog(self, fgcmStarCat, offsets, bands):
    """
    Turn an FGCM-formatted star catalog into a reference catalog,
    applying zeropoint offsets.

    Parameters
    ----------
    fgcmStarCat : `lsst.afw.table.SimpleCatalog`
        SimpleCatalog as output by fgcmcal
    offsets : `list` with len(bands) entries
        Zeropoint offsets to apply
    bands : `list` [`str`]
        List of band names from FGCM output

    Returns
    -------
    formattedCat : `lsst.afw.table.SimpleCatalog`
        SimpleCatalog suitable for using as a reference catalog
    """
    sourceMapper = afwTable.SchemaMapper(fgcmStarCat.schema)
    minSchema = LoadIndexedReferenceObjectsTask.makeMinimalSchema(
        bands, addCentroid=False, addIsResolved=True, coordErrDim=0)
    sourceMapper.addMinimalSchema(minSchema)
    for band in bands:
        sourceMapper.editOutputSchema().addField('%s_nGood' % (band), type=np.int32)
        sourceMapper.editOutputSchema().addField('%s_nTotal' % (band), type=np.int32)
        sourceMapper.editOutputSchema().addField('%s_nPsfCandidate' % (band), type=np.int32)

    formattedCat = afwTable.SimpleCatalog(sourceMapper.getOutputSchema())
    formattedCat.reserve(len(fgcmStarCat))
    formattedCat.extend(fgcmStarCat, mapper=sourceMapper)

    # Note that we don't have to set `resolved` because the default is False
    for b, band in enumerate(bands):
        mag = fgcmStarCat['mag_std_noabs'][:, b].astype(np.float64) + offsets[b]
        # We want fluxes in nJy from calibrated AB magnitudes
        # (after applying offset).  Updated after RFC-549 and RFC-575.
        flux = (mag * units.ABmag).to_value(units.nJy)
        fluxErr = (np.log(10.) / 2.5) * flux * fgcmStarCat['magErr_std'][:, b].astype(np.float64)

        formattedCat['%s_flux' % (band)][:] = flux
        formattedCat['%s_fluxErr' % (band)][:] = fluxErr
        formattedCat['%s_nGood' % (band)][:] = fgcmStarCat['ngood'][:, b]
        formattedCat['%s_nTotal' % (band)][:] = fgcmStarCat['ntotal'][:, b]
        formattedCat['%s_nPsfCandidate' % (band)][:] = fgcmStarCat['npsfcand'][:, b]

    addRefCatMetadata(formattedCat)

    return formattedCat
def make_catalog():
    schema = LoadReferenceObjectsTask.makeMinimalSchema(['r', 'z'])
    schema.addField('bad_flux', doc='old flux units', type=float, units='')
    schema.addField('bad_fluxErr', doc='old flux units', type=float, units='Jy')
    refCat = afwTable.SimpleCatalog(schema)
    refObj = refCat.addNew()
    # flux and fluxErr are expected to be defined at module scope in the test file
    refObj["bad_flux"] = flux
    refObj["bad_fluxErr"] = fluxErr
    return refCat
def __init__(self):
    schema = afwTable.SimpleTable.makeMinimalSchema()
    schema.addField("radius", "Angle", "radius of mask")
    self._catalog = afwTable.SimpleCatalog(schema)
    self._catalog.table.setMetadata(dafBase.PropertyList())

    self.table = self._catalog.table
    self.addNew = self._catalog.addNew
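# A hedged usage sketch for a wrapper built the way the __init__ above builds
# it.  The class name CircleMaskCatalog, the coordinates, and the radius are
# assumptions made purely for illustration; only calls already used in these
# snippets (addNew, set, setCoord) plus lsst.geom are relied on.
def _exampleFillMaskCatalog():
    import lsst.geom

    masks = CircleMaskCatalog()  # hypothetical class wrapping the __init__ above
    record = masks.addNew()
    record.setCoord(lsst.geom.SpherePoint(150.0, 2.2, lsst.geom.degrees))
    record.set("radius", 5.0 * lsst.geom.arcseconds)
    return masks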
def _outputStandardStars(self, butler, offsets):
    """
    Output standard stars in indexed reference catalog format.

    Parameters
    ----------
    butler : lsst.daf.persistence.Butler
    offsets : np.array of floats
        Per band zeropoint offsets
    """
    self.log.info("Outputting standard stars to %s" %
                  (self.config.datasetConfig.ref_dataset_name))

    # Load the stars (this is the full set of stars, no cuts)
    stars = butler.get('fgcmStandardStars', fgcmcycle=self.useCycle)

    # We assume these are returned in radians, because the units interface
    # required a loop over a giant catalog.
    # TODO: figure out check of native units
    indices = np.array(self.indexer.indexPoints(np.degrees(stars['coord_ra']),
                                                np.degrees(stars['coord_dec'])))

    formattedCat = self._formatCatalog(stars, offsets)

    # Write the master schema
    dataId = self.indexer.makeDataId('master_schema',
                                     self.config.datasetConfig.ref_dataset_name)
    butler.put(afwTable.SimpleCatalog(formattedCat.schema), 'ref_cat', dataId=dataId)

    # Break up the pixels using a histogram
    h, rev = esutil.stat.histogram(indices, rev=True)

    gd, = np.where(h > 0)
    selected = np.zeros(len(formattedCat), dtype=bool)
    for i in gd:
        i1a = rev[rev[i]: rev[i + 1]]

        # Turn into a boolean array
        selected[:] = False
        selected[i1a] = True

        # Write the individual pixel
        dataId = self.indexer.makeDataId(indices[i1a[0]],
                                         self.config.datasetConfig.ref_dataset_name)
        butler.put(formattedCat[selected], 'ref_cat', dataId=dataId)

    # And save the dataset configuration
    dataId = self.indexer.makeDataId(None,
                                     self.config.datasetConfig.ref_dataset_name)
    butler.put(self.config.datasetConfig, 'ref_cat_config', dataId=dataId)

    self.log.info("Done outputting standard stars.")
def makeDataHorizontal(self):
    """Make minimal synthetic catalog with simple values."""
    ra_test = np.linspace(np.radians(-10.), np.radians(10.), 101)
    dec_test = np.tile(0., 101)

    schema = afwTable.SourceTable.makeMinimalSchema()
    cat = afwTable.SimpleCatalog(schema)
    cat.resize(len(ra_test))
    cat = cat.copy(deep=True)
    cat['coord_ra'][:] = ra_test
    cat['coord_dec'][:] = dec_test

    return cat
def loadSkyCircle(self, ctrCoord, radius, filterName=None):
    """!Load reference objects that overlap a circular sky region

    @param[in] ctrCoord  center of search region (an lsst.afw.geom.Coord)
    @param[in] radius  radius of search region (an lsst.afw.geom.Angle)
    @param[in] filterName  name of filter, or None for the default filter;
        used for flux values in case we have flux limits (which are not yet implemented)

    @return an lsst.pipe.base.Struct containing:
    - refCat  a catalog of reference objects with the
        \link meas_algorithms_loadReferenceObjects_Schema standard schema \endlink
        as documented in LoadReferenceObjects, including photometric, resolved and variable;
        hasCentroid is False for all objects.
    - fluxField = name of flux field for specified filterName.  None if refCat is None.
    """
    id_list, boundary_mask = self.indexer.get_pixel_ids(ctrCoord, radius)
    shards = self.get_shards(id_list)
    refCat = self.butler.get('ref_cat',
                             dataId=self.indexer.make_data_id('master_schema',
                                                              self.ref_dataset_name),
                             immediate=True)
    self._addFluxAliases(refCat.schema)
    fluxField = getRefFluxField(schema=refCat.schema, filterName=filterName)
    for shard, is_on_boundary in zip(shards, boundary_mask):
        if shard is None:
            continue
        if is_on_boundary:
            refCat.extend(self._trim_to_circle(shard, ctrCoord, radius))
        else:
            refCat.extend(shard)

    # make sure catalog is contiguous
    if not refCat.isContiguous():
        refCat = refCat.copy()

    # add and initialize centroid and hasCentroid fields (these are added
    # after loading to avoid wasting space in the saved catalogs)
    # the new fields are automatically initialized to (nan, nan) and False
    # so no need to set them explicitly
    mapper = afwTable.SchemaMapper(refCat.schema, True)
    mapper.addMinimalSchema(refCat.schema, True)
    mapper.editOutputSchema().addField("centroid_x", type=float)
    mapper.editOutputSchema().addField("centroid_y", type=float)
    mapper.editOutputSchema().addField("hasCentroid", type="Flag")
    expandedCat = afwTable.SimpleCatalog(mapper.getOutputSchema())
    expandedCat.extend(refCat, mapper=mapper)
    del refCat  # avoid accidentally returning the unexpanded reference catalog

    # return reference catalog
    return pipeBase.Struct(
        refCat=expandedCat,
        fluxField=fluxField,
    )
def setUp(self):
    self.size = 10
    self.numMatches = self.size//2
    self.schema = afwTable.SimpleTable.makeMinimalSchema()
    self.cat1 = afwTable.SimpleCatalog(self.schema)
    self.cat2 = afwTable.SimpleCatalog(self.schema)
    for i in range(self.size):
        record1 = self.cat1.table.makeRecord()
        record2 = self.cat2.table.makeRecord()
        record1.setId(i + 1)
        record2.setId(self.size - i)
        self.cat1.append(record1)
        self.cat2.append(record2)

    self.matches = []
    for i in range(self.numMatches):
        index = 2*i
        match = afwTable.SimpleMatch(self.cat1[index], self.cat2[self.size - index - 1], index)
        if Debug:
            print("Inject:", match.first.getId(), match.second.getId())
        self.matches.append(match)
def loadSkyCircle(self, ctrCoord, radius, filterName=None, epoch=None):
    shardIdList, isOnBoundaryList = self.indexer.getShardIds(ctrCoord, radius)
    shards = self.getShards(shardIdList)
    refCat = self.butler.get('ref_cat',
                             dataId=self.indexer.makeDataId('master_schema',
                                                            self.ref_dataset_name),
                             immediate=True)
    self._addFluxAliases(refCat.schema)
    fluxField = getRefFluxField(schema=refCat.schema, filterName=filterName)
    for shard, isOnBoundary in zip(shards, isOnBoundaryList):
        if shard is None:
            continue
        if isOnBoundary:
            refCat.extend(self._trimToCircle(shard, ctrCoord, radius))
        else:
            refCat.extend(shard)

    if epoch is not None and "pm_ra" in refCat.schema:
        # check for a catalog in a non-standard format
        if isinstance(refCat.schema["pm_ra"].asKey(), lsst.afw.table.KeyAngle):
            self.applyProperMotions(refCat, epoch)
        else:
            self.log.warn("Catalog pm_ra field is not an Angle; not applying proper motion")

    # add and initialize centroid and hasCentroid fields (these are
    # added after loading to avoid wasting space in the saved catalogs)
    # the new fields are automatically initialized to (nan, nan) and
    # False so no need to set them explicitly
    mapper = afwTable.SchemaMapper(refCat.schema, True)
    mapper.addMinimalSchema(refCat.schema, True)
    mapper.editOutputSchema().addField("centroid_x", type=float)
    mapper.editOutputSchema().addField("centroid_y", type=float)
    mapper.editOutputSchema().addField("hasCentroid", type="Flag")
    expandedCat = afwTable.SimpleCatalog(mapper.getOutputSchema())
    expandedCat.extend(refCat, mapper=mapper)
    del refCat  # avoid accidentally returning the unexpanded ref cat

    # make sure catalog is contiguous
    if not expandedCat.isContiguous():
        expandedCat = expandedCat.copy(True)

    # return reference catalog
    return pipeBase.Struct(
        refCat=expandedCat,
        fluxField=fluxField,
    )
def __init__(self):
    schema = afwTable.SimpleTable.makeMinimalSchema()
    schema.addField("type", str, "type of region (e.g. box, circle)", size=10)
    schema.addField("radius", "Angle", "radius of mask (if type == circle)")
    schema.addField("height", "Angle", "height of mask (if type == box)")
    schema.addField("width", "Angle", "width of mask (if type == box)")
    schema.addField("angle", "Angle", "rotation of mask (if type == box)")
    schema.addField("mag", float, "object's magnitude")

    self._catalog = afwTable.SimpleCatalog(schema)
    self._catalog.table.setMetadata(dafBase.PropertyList())

    self.table = self._catalog.table
    self.addNew = self._catalog.addNew
def _saveMasterSchema(self, filename):
    """Generate and save the master catalog schema.

    Parameters
    ----------
    filename : `str`
        An input file to read to get the input dtype.
    """
    arr = self.file_reader.run(filename)
    schema, key_map = self.makeSchema(arr.dtype)

    catalog = afwTable.SimpleCatalog(schema)
    addRefCatMetadata(catalog)
    self._writeMasterSchema(catalog)
    return schema, key_map
def computePosRefCatalog(self, sourceCat):
    """Generate a position reference catalog from a source catalog"""
    minimalPosRefSchema = LoadReferenceObjectsTask.makeMinimalSchema(
        filterNameList=["r"], addCentroid=True)
    refCat = afwTable.SimpleCatalog(minimalPosRefSchema)
    for source in sourceCat:
        refObj = refCat.addNew()
        refObj.setCoord(source.getCoord())
        refObj.set("centroid_x", source.getX())
        refObj.set("centroid_y", source.getY())
        refObj.set("hasCentroid", True)
        refObj.set("r_flux", source.get("slot_ApFlux_instFlux"))
        refObj.set("r_fluxErr", source.get("slot_ApFlux_instFluxErr"))
        refObj.setId(source.getId())
    return refCat
def testFilterAliasMap(self):
    """Make a schema with filter aliases."""
    for filterMap in ({}, {"camr": "r"}):
        config = TrivialLoader.ConfigClass()
        config.filterMap = filterMap
        loader = TrivialLoader(config=config)
        refSchema = TrivialLoader.makeMinimalSchema(filterNameList="r")
        loader._addFluxAliases(refSchema,
                               anyFilterMapsToThis=config.anyFilterMapsToThis,
                               filterMap=config.filterMap)

        self.assertIn("r_flux", refSchema)
        self.assertIn("r_fluxErr", refSchema)

        # camera filters aliases are named <filter>_camFlux
        if "camr" in filterMap:
            self.assertEqual(getRefFluxField(refSchema, "camr"), "camr_camFlux")
        else:
            with self.assertRaisesRegex(RuntimeError,
                                        r"Could not find flux field\(s\) camr_camFlux, camr_flux"):
                getRefFluxField(refSchema, "camr")

        refCat = afwTable.SimpleCatalog(refSchema)
        refObj = refCat.addNew()
        refObj["r_flux"] = 1.23
        self.assertAlmostEqual(refCat[0].get(getRefFluxField(refSchema, "r")), 1.23)
        if "camr" in filterMap:
            self.assertAlmostEqual(refCat[0].get(getRefFluxField(refSchema, "camr")), 1.23)

        refObj["r_fluxErr"] = 0.111
        if "camr" in filterMap:
            self.assertEqual(refCat[0].get("camr_camFluxErr"), 0.111)

        fluxKey, fluxErrKey = getRefFluxKeys(refSchema, "r")
        self.assertEqual(refCat[0].get(fluxKey), 1.23)
        self.assertEqual(refCat[0].get(fluxErrKey), 0.111)

        if "camr" in filterMap:
            fluxKey, fluxErrKey = getRefFluxKeys(refSchema, "camr")
            self.assertEqual(refCat[0].get(fluxErrKey), 0.111)
        else:
            with self.assertRaises(RuntimeError):
                getRefFluxKeys(refSchema, "camr")
def setUp(self):
    crval = IcrsCoord(afwGeom.PointD(44., 45.))
    crpix = afwGeom.PointD(0, 0)
    arcsecPerPixel = 1 / 3600.0
    CD11 = arcsecPerPixel
    CD12 = 0
    CD21 = 0
    CD22 = arcsecPerPixel

    self.tanWcs = makeWcs(crval, crpix, CD11, CD12, CD21, CD22)

    S = 300
    N = 5

    if self.MatchClass == afwTable.ReferenceMatch:
        refSchema = LoadReferenceObjectsTask.makeMinimalSchema(
            filterNameList=["r"], addFluxSigma=True, addIsPhotometric=True)
        self.refCat = afwTable.SimpleCatalog(refSchema)
    elif self.MatchClass == afwTable.SourceMatch:
        refSchema = afwTable.SourceTable.makeMinimalSchema()
        self.refCat = afwTable.SourceCatalog(refSchema)
    else:
        raise RuntimeError("Unsupported MatchClass=%r" % (self.MatchClass,))
    srcSchema = afwTable.SourceTable.makeMinimalSchema()
    SingleFrameMeasurementTask(schema=srcSchema)
    self.refCoordKey = afwTable.CoordKey(refSchema["coord"])
    self.srcCoordKey = afwTable.CoordKey(srcSchema["coord"])
    self.srcCentroidKey = afwTable.Point2DKey(srcSchema["slot_Centroid"])
    self.sourceCat = afwTable.SourceCatalog(srcSchema)
    self.origSourceCat = afwTable.SourceCatalog(srcSchema)  # undistorted copy

    self.matches = []
    for i in np.linspace(0., S, N):
        for j in np.linspace(0., S, N):
            src = self.sourceCat.addNew()
            refObj = self.refCat.addNew()

            src.set(self.srcCentroidKey, afwGeom.Point2D(i, j))

            c = self.tanWcs.pixelToSky(afwGeom.Point2D(i, j))
            refObj.setCoord(c)

            self.matches.append(self.MatchClass(refObj, src, 0.0))
def _saveMasterSchema(self, filename):
    """Generate and save the master catalog schema.

    Parameters
    ----------
    filename : `str`
        An input file to read to get the input dtype.
    """
    arr = self.file_reader.run(filename)
    schema, key_map = self.makeSchema(arr.dtype)
    dataId = self.indexer.makeDataId('master_schema',
                                     self.config.dataset_config.ref_dataset_name)

    catalog = afwTable.SimpleCatalog(schema)
    addRefCatMetadata(catalog)
    self.butler.put(catalog, 'ref_cat', dataId=dataId)
    return schema, key_map
def _formatCatalog(self, fgcmStarCat, offsets):
    """
    Turn an FGCM-formatted star catalog into a reference catalog,
    applying zeropoint offsets.

    Parameters
    ----------
    fgcmStarCat : `afwTable.SimpleCatalog`
        SimpleCatalog as output by fgcmcal
    offsets : `list` with len(self.bands) entries
        Zeropoint offsets to apply

    Returns
    -------
    formattedCat : `afwTable.SimpleCatalog`
        SimpleCatalog suitable for using as a reference catalog
    """
    sourceMapper = afwTable.SchemaMapper(fgcmStarCat.schema)
    minSchema = LoadIndexedReferenceObjectsTask.makeMinimalSchema(
        self.bands, addCentroid=False, addIsResolved=True, coordErrDim=0)
    sourceMapper.addMinimalSchema(minSchema)
    for band in self.bands:
        sourceMapper.editOutputSchema().addField('%s_nGood' % (band), type=np.int32)

    formattedCat = afwTable.SimpleCatalog(sourceMapper.getOutputSchema())
    formattedCat.reserve(len(fgcmStarCat))
    formattedCat.extend(fgcmStarCat, mapper=sourceMapper)

    # Note that we don't have to set `resolved` because the default is False
    for b, band in enumerate(self.bands):
        mag = fgcmStarCat['mag_std_noabs'][:, b] + offsets[b]
        # We want fluxes in Jy from calibrated AB magnitudes
        # (after applying offset)
        # TODO: Full implementation of RFC-549 will have all reference
        # catalogs in nJy instead of Jy.
        flux = afwImage.fluxFromABMag(mag)
        fluxErr = afwImage.fluxErrFromABMagErr(fgcmStarCat['magErr_std'][:, b], mag)
        formattedCat['%s_flux' % (band)][:] = flux
        formattedCat['%s_fluxErr' % (band)][:] = fluxErr
        formattedCat['%s_nGood' % (band)][:] = fgcmStarCat['ngood'][:, b]

    return formattedCat
def loadData(self, rangePix=3000, numPoints=25):
    """Load catalogs and make the match list

    This is a separate function so data can be reloaded if fitting more than once
    (each time a WCS is fit it may update the source catalog, reference catalog
    and match list)
    """
    if self.MatchClass == afwTable.ReferenceMatch:
        refSchema = LoadReferenceObjectsTask.makeMinimalSchema(
            filterNameList=["r"], addIsPhotometric=True, addCentroid=True)
        self.refCat = afwTable.SimpleCatalog(refSchema)
    elif self.MatchClass == afwTable.SourceMatch:
        refSchema = afwTable.SourceTable.makeMinimalSchema()
        self.refCat = afwTable.SourceCatalog(refSchema)
    else:
        raise RuntimeError("Unsupported MatchClass=%r" % (self.MatchClass,))
    srcSchema = afwTable.SourceTable.makeMinimalSchema()
    SingleFrameMeasurementTask(schema=srcSchema)
    self.srcCoordKey = afwTable.CoordKey(srcSchema["coord"])
    self.srcCentroidKey = afwTable.Point2DKey(srcSchema["slot_Centroid"])
    self.srcCentroidKey_xErr = srcSchema["slot_Centroid_xErr"].asKey()
    self.srcCentroidKey_yErr = srcSchema["slot_Centroid_yErr"].asKey()
    self.sourceCat = afwTable.SourceCatalog(srcSchema)

    self.matches = []
    for i in np.linspace(0., rangePix, numPoints):
        for j in np.linspace(0., rangePix, numPoints):
            src = self.sourceCat.addNew()
            refObj = self.refCat.addNew()

            src.set(self.srcCentroidKey, lsst.geom.Point2D(i, j))
            src.set(self.srcCentroidKey_xErr, 0.1)
            src.set(self.srcCentroidKey_yErr, 0.1)

            c = self.tanWcs.pixelToSky(i, j)
            refObj.setCoord(c)

            if False:
                print("x,y = (%.1f, %.1f) pixels -- RA,Dec = (%.3f, %.3f) deg" %
                      (i, j, c.toFk5().getRa().asDegrees(), c.toFk5().getDec().asDegrees()))

            self.matches.append(self.MatchClass(refObj, src, 0.0))
def getCatalog(self, dataId, schema):
    """Get a catalog from the butler or create it if it doesn't exist.

    Parameters
    ----------
    dataId : `dict`
        Identifier for catalog to retrieve
    schema : `lsst.afw.table.Schema`
        Schema to use in catalog creation if the butler can't get it

    Returns
    -------
    catalog : `lsst.afw.table.SimpleCatalog`
        The catalog specified by `dataId`
    """
    if self.butler.datasetExists('ref_cat', dataId=dataId):
        return self.butler.get('ref_cat', dataId=dataId)
    return afwTable.SimpleCatalog(schema)