def __init__(self, camera=None, detector=None, log=None, **kwargs):
    """Initialize calibration identification state and metadata.

    Parameters
    ----------
    camera : `lsst.afw.cameraGeom.Camera`, optional
        Camera forwarded to ``updateMetadata``.
    detector : `lsst.afw.cameraGeom.Detector`, optional
        Detector used to populate identification fields via ``fromDetector``.
    log : `lsst.log.Log`, optional
        Logger to use; a default logger is created when omitted.
    **kwargs
        Additional calibration information passed to ``calibInfoFromDict``.
    """
    # Identification fields start unset; they are filled in from kwargs,
    # the detector, or later metadata updates.
    for attrName in ("_instrument", "_raftName", "_slotName",
                     "_detectorName", "_detectorSerial", "_detectorId",
                     "_filter", "_calibId"):
        setattr(self, attrName, None)
    self._metadata = PropertyList()
    self.setMetadata(PropertyList())
    self.calibInfoFromDict(kwargs)

    # Define the required attributes for this calibration.
    self.requiredAttributes = {'_OBSTYPE', '_SCHEMA', '_VERSION'}
    self.requiredAttributes.update(['_instrument', '_raftName', '_slotName',
                                    '_detectorName', '_detectorSerial',
                                    '_detectorId', '_filter', '_calibId',
                                    '_metadata'])

    self.log = log if log else Log.getLogger(__name__.partition(".")[2])

    if detector:
        self.fromDetector(detector)
    self.updateMetadata(camera=camera, detector=detector)
def write_fits(self, filename):
    """Write this object to a file.

    Parameters
    ----------
    filename : `str`
        Name of file to write.
    """
    # Create primary HDU with global metadata.
    metadata = PropertyList()
    metadata["HAS_DEFAULT"] = self.default_extended_psf is not None
    if self.focal_plane_regions:
        metadata["HAS_REGIONS"] = True
        metadata["REGION_NAMES"] = list(self.focal_plane_regions.keys())
        # Record which detectors belong to each region so a reader can
        # reconstruct the focal plane partition.
        for region, e_psf_region in self.focal_plane_regions.items():
            metadata[region] = e_psf_region.detector_list
    else:
        metadata["HAS_REGIONS"] = False
    fits_primary = afwFits.Fits(filename, "w")
    fits_primary.createEmpty()
    fits_primary.writeMetadata(metadata)
    fits_primary.closeFile()
    # Write default extended PSF (image then mask, appended as separate HDUs).
    if self.default_extended_psf is not None:
        default_hdu_metadata = PropertyList()
        default_hdu_metadata.update({"REGION": "DEFAULT", "EXTNAME": "IMAGE"})
        self.default_extended_psf.image.writeFits(
            filename, metadata=default_hdu_metadata, mode="a")
        default_hdu_metadata.update({"REGION": "DEFAULT", "EXTNAME": "MASK"})
        self.default_extended_psf.mask.writeFits(
            filename, metadata=default_hdu_metadata, mode="a")
    # Write extended PSF for each focal plane region.
    # (The index from the original ``enumerate`` was never used, so iterate
    # over the items directly.)
    for region, e_psf_region in self.focal_plane_regions.items():
        metadata = PropertyList()
        metadata.update({"REGION": region, "EXTNAME": "IMAGE"})
        e_psf_region.extended_psf_image.image.writeFits(filename, metadata=metadata, mode="a")
        metadata.update({"REGION": region, "EXTNAME": "MASK"})
        e_psf_region.extended_psf_image.mask.writeFits(filename, metadata=metadata, mode="a")
def __init__(self, table=None, detector=None, override=False, log=None):
    """Initialize linearity state, optionally from a lookup table or detector.

    Parameters
    ----------
    table : array-like, optional
        Two-dimensional lookup table; the second axis must be at least as
        long as the first, otherwise a `RuntimeError` is raised.
    detector : `lsst.afw.cameraGeom.Detector`, optional
        Detector used to populate per-amplifier values via ``fromDetector``.
    override : `bool`, optional
        Stored on the instance as ``self.override``.
    log : `lsst.log.Log`, optional
        Logger stored on the instance.

    Raises
    ------
    RuntimeError
        If ``table`` is not two-dimensional or its axes appear switched.
    """
    self._detectorName = None
    self._detectorSerial = None
    self._detectorId = None
    self._metadata = PropertyList()

    # Per-amplifier linearity parameters, each keyed by amplifier.
    for mapName in ("linearityCoeffs", "linearityType", "linearityThreshold",
                    "linearityMaximum", "linearityUnits", "linearityBBox",
                    "fitParams", "fitParamsErr",
                    "linearityFitReducedChiSquared"):
        setattr(self, mapName, {})

    self.override = override
    self.populated = False
    self.log = log

    self.tableData = None
    if table is not None:
        shape = table.shape
        if len(shape) != 2:
            raise RuntimeError(
                "table shape = %s; must have two dimensions" % (shape, ))
        if shape[1] < shape[0]:
            raise RuntimeError("table shape = %s; indices are switched" % (shape, ))
        # C-ordered copy for predictable row-major access.
        self.tableData = np.array(table, order="C")

    if detector:
        self.fromDetector(detector)
def testBasics(self):
    """Check basic formatting and skipping bad values
    """
    metadata = PropertyList()
    # (name, value) pairs; some entries are deliberately unrepresentable
    # and must be skipped by makeLimitedFitsHeader.
    for name, value in [
        ("ABOOL", True),
        ("AFLOAT", 1.2e25),
        ("ANINT", -5),
        ("LONGNAME1", 1),  # name is longer than 8 characters; skip it
        ("LONGSTR", "skip this item because the formatted value "
                    "is too long: longer than 80 characters "),
        ("ASTRING1", "value for string"),
    ]:
        metadata.set(name, value)

    header = makeLimitedFitsHeader(metadata)

    # Expected card images, without padding to 80 chars.
    expectedLines = [
        "ABOOL = 1",
        "AFLOAT = 1.2E+25",
        "ANINT = -5",
        "ASTRING1= 'value for string'",
    ]
    # Each card is padded to the standard 80-character FITS record width.
    expectedHeader = "".join(line.ljust(80) for line in expectedLines)
    self.assertEqual(header, expectedHeader)

    self.checkExcludeNames(metadata, expectedLines)
def _makeStamps(self, nStamps, stampSize):
    """Build a list of random masked-image stamps and matching metadata.

    Parameters
    ----------
    nStamps : `int`
        Number of stamps to create.
    stampSize : `int`
        Width and height, in pixels, of each square stamp.

    Returns
    -------
    stampList : `list` of `lsst.afw.image.MaskedImageF`
        Stamps with seeded random image data, mask == 10, variance == 100.
    metadata : `lsst.daf.base.PropertyList`
        Per-stamp positions, detector/camera names, and defocal types;
        the first half of the stamps are Intra, the rest Extra.
    """
    # Fixed seed so generated stamp data is reproducible across runs.
    rng = np.random.RandomState(42)
    stampList = []
    for _ in range(nStamps):
        stamp = afwImage.maskedImage.MaskedImageF(stampSize, stampSize)
        stamp.image.array += rng.rand(stampSize, stampSize)
        stamp.mask.array += 10
        stamp.variance.array += 100
        stampList.append(stamp)

    dfcTypes = [DefocalType.Extra.value] * nStamps
    halfStampIdx = int(nStamps / 2)
    dfcTypes[:halfStampIdx] = [DefocalType.Intra.value] * halfStampIdx

    metadata = PropertyList()
    metadata["RA_DEG"] = np.arange(nStamps)
    metadata["DEC_DEG"] = np.arange(nStamps) + 5
    metadata["CENT_X"] = np.arange(nStamps) + 20
    metadata["CENT_Y"] = np.arange(nStamps) + 25
    metadata["DET_NAME"] = ["R22_S11"] * nStamps
    metadata["CAM_NAME"] = ["LSSTCam"] * nStamps
    metadata["DFC_TYPE"] = dfcTypes
    return stampList, metadata
def make_dm_wcs(galsim_wcs):
    """
    convert galsim wcs to stack wcs

    Parameters
    ----------
    galsim_wcs: galsim WCS
        Should be TAN or TAN-SIP

    Returns
    -------
    DM Stack sky wcs

    Raises
    ------
    ValueError
        If ``galsim_wcs.wcs_type`` is neither 'TAN' nor 'TAN-SIP'.
    """
    if galsim_wcs.wcs_type == 'TAN':
        crpix = galsim_wcs.crpix
        # DM uses 0 offset, galsim uses FITS 1 offset
        stack_crpix = Point2D(crpix[0] - 1, crpix[1] - 1)
        cd_matrix = galsim_wcs.cd

        crval = geom.SpherePoint(
            galsim_wcs.center.ra / coord.radians,
            galsim_wcs.center.dec / coord.radians,
            geom.radians,
        )
        stack_wcs = makeSkyWcs(
            crpix=stack_crpix,
            crval=crval,
            cdMatrix=cd_matrix,
        )
    elif galsim_wcs.wcs_type == 'TAN-SIP':
        # Round-trip through a FITS header; this relies on galsim's 1-offset
        # convention.  The fake bounds are not used when the lower bounds
        # are 1, but the extra keywords GS_{X,Y}MIN are set, which we
        # remove below.
        fake_bounds = galsim.BoundsI(1, 10, 1, 10)
        hdr = {}
        galsim_wcs.writeToFitsHeader(hdr, fake_bounds)

        del hdr["GS_XMIN"]
        del hdr["GS_YMIN"]

        metadata = PropertyList()

        for key, value in hdr.items():
            metadata.set(key, value)

        stack_wcs = makeSkyWcs(metadata)
    else:
        # Previously an unsupported type fell through to an
        # UnboundLocalError on ``stack_wcs``; fail with a clear message.
        raise ValueError(
            "Unsupported galsim WCS type: %r; expected 'TAN' or 'TAN-SIP'"
            % (galsim_wcs.wcs_type,)
        )

    return stack_wcs
def testIgnoreKeywords(self):
    """Check that certain keywords are ignored in read/write of headers"""
    # May appear only once in the FITS file (because cfitsio will insist on putting them there)
    single = ["SIMPLE", "BITPIX", "EXTEND", "NAXIS"]
    # May not appear at all in the FITS file (cfitsio doesn't write these by default)
    notAtAll = [
        # FITS core keywords
        "GCOUNT", "PCOUNT", "XTENSION", "BSCALE", "BZERO", "TZERO", "TSCAL",
        # FITS compression keywords
        "ZBITPIX", "ZIMAGE", "ZCMPTYPE", "ZSIMPLE", "ZEXTEND", "ZBLANK",
        "ZDATASUM", "ZHECKSUM", "ZNAXIS", "ZTILE", "ZNAME", "ZVAL",
        # Not essential these be excluded, but will prevent fitsverify warnings
        "DATASUM", "CHECKSUM",
    ]
    # Additional keywords to check; these should go straight through
    # Some of these are longer/shorter versions of strings above,
    # to test that the checks for just the start of strings is working.
    others = ["FOOBAR", "SIMPLETN", "DATASUMX", "NAX", "SIM"]

    header = PropertyList()
    for ii, key in enumerate([*single, *notAtAll, *others]):
        header.add(key, ii)

    # Round-trip the header through an in-memory FITS file.
    fitsFile = lsst.afw.fits.MemFileManager()
    with lsst.afw.fits.Fits(fitsFile, "w") as fits:
        fits.createEmpty()
        fits.writeMetadata(header)
    with lsst.afw.fits.Fits(fitsFile, "r") as fits:
        metadata = fits.readMetadata()

    # Each keyword group has a fixed expected occurrence count on read-back.
    for expectedCount, keys in ((1, single), (0, notAtAll), (1, others)):
        for key in keys:
            self.assertEqual(metadata.valueCount(key), expectedCount, key)