def test_translator(self):
    header = self.header

    # Specify a translation class
    with self.assertWarns(UserWarning):
        # Since the translator is incomplete it should issue warnings
        v1 = ObservationInfo(header, translator_class=InstrumentTestTranslator)
    self.assertEqual(v1.instrument, "SCUBA_test")
    self.assertEqual(v1.telescope, "LSST")

    # Now automated class
    with self.assertWarns(UserWarning):
        # Since the translator is incomplete it should issue warnings
        v1 = ObservationInfo(header)
    self.assertEqual(v1.instrument, "SCUBA_test")
    self.assertEqual(v1.telescope, "LSST")

    location = v1.location.to_geodetic()
    self.assertAlmostEqual(location.height.to("m").to_value(), 4123.0, places=1)

    # Check that headers have been removed
    new_hdr = v1.stripped_header()
    self.assertNotIn("INSTRUME", new_hdr)
    self.assertNotIn("OBSGEO-X", new_hdr)
    self.assertIn("TELESCOP", new_hdr)

    # Check the list of cards that were used
    used = v1.cards_used
    self.assertIn("INSTRUME", used)
    self.assertIn("OBSGEO-Y", used)
    self.assertNotIn("TELESCOP", used)
def test_megaprime_stripping(self):
    header = read_test_file("fitsheader-megaprime.yaml", dir=self.datadir)
    v1 = ObservationInfo(header)

    # Check that headers have been removed
    new_hdr = v1.stripped_header()
    self.assertNotIn("INSTRUME", new_hdr)
    self.assertNotIn("TELESCOP", new_hdr)
    self.assertIn("CCD", new_hdr)
def test_failures(self):
    header = {}

    with self.assertRaises(TypeError):
        ObservationInfo(header, translator_class=ObservationInfo)

    with self.assertRaises(ValueError):
        ObservationInfo(header, translator_class=InstrumentTestTranslator,
                        subset={"definitely_not_known"})

    with self.assertRaises(ValueError):
        ObservationInfo(header, translator_class=InstrumentTestTranslator,
                        required={"definitely_not_known"})

    with self.assertLogs("astro_metadata_translator"):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=False)

    with self.assertRaises(KeyError):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=True)

    with self.assertLogs("astro_metadata_translator"):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=False,
                            filename="testfile1")

    with self.assertRaises(KeyError):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=True,
                            filename="testfile2")

    with self.assertRaises(NotImplementedError):
        with self.assertLogs("astro_metadata_translator", level="WARN"):
            ObservationInfo(header, translator_class=MissingMethodsTranslator)

    with self.assertRaises(KeyError):
        with self.assertWarns(UserWarning):
            with self.assertLogs("astro_metadata_translator", level="WARN"):
                ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=False,
                                required={"boresight_airmass"})
def test_obsinfo(self):
    """Test construction of ObservationInfo without header."""
    obsinfo = makeObservationInfo(boresight_airmass=1.5, tracking_radec=None)
    self.assertIsInstance(obsinfo, ObservationInfo)
    self.assertIsNone(obsinfo.tracking_radec)
    self.assertAlmostEqual(obsinfo.boresight_airmass, 1.5)
    self.assertIsNone(obsinfo.observation_id)
    self.assertEqual(obsinfo.cards_used, set())
    self.assertEqual(obsinfo.stripped_header(), {})

    with self.assertRaises(TypeError):
        ObservationInfo.makeObservationInfo(boresight_airmass=1.5, observation_id=5)

    with self.assertRaises(KeyError):
        obsinfo = ObservationInfo.makeObservationInfo(unrecognized=1.5, keys="unknown")
def test_checker(self):
    filename = "latiss-future.yaml"
    from astro_metadata_translator.tests import read_test_file
    from astro_metadata_translator import ObservationInfo
    header = read_test_file(filename, self.datadir)
    obsInfo = ObservationInfo(header, pedantic=True)
    self.assertTrue(obsInfo)
def canStandardize(cls, header, filename=None, **kwargs):
    try:
        ObservationInfo(header, filename=filename)
    except ValueError:
        return False
    else:
        return True
def _calculate_dataset_info(self, header, filename):
    """Calculate a RawFileDatasetInfo from the supplied information.

    Parameters
    ----------
    header : `Mapping`
        Header from the dataset.
    filename : `str`
        Filename to use for error messages.

    Returns
    -------
    dataset : `RawFileDatasetInfo`
        The region, dataId, and observation information associated with
        this dataset.
    """
    obsInfo = ObservationInfo(header)
    dataId = DataCoordinate.standardize(instrument=obsInfo.instrument,
                                        exposure=obsInfo.exposure_id,
                                        detector=obsInfo.detector_num,
                                        universe=self.universe)
    if obsInfo.instrument != self.instrument.getName():
        raise ValueError(f"Incorrect instrument (expected {self.instrument.getName()}, "
                         f"got {obsInfo.instrument}) for file {filename}.")
    FormatterClass = self.instrument.getRawFormatter(dataId)
    region = self._calculate_region_from_dataset_metadata(obsInfo, header, FormatterClass)
    return RawFileDatasetInfo(obsInfo=obsInfo, region=region, dataId=dataId)
def __call__(self, md, exposureId=None):
    """Construct a VisitInfo and strip associated data from the metadata.

    Parameters
    ----------
    md : `lsst.daf.base.PropertyList` or `lsst.daf.base.PropertySet`
        Metadata to pull from. May be modified if ``stripHeader`` is
        ``True``.
    exposureId : `int`, optional
        Ignored. Here for compatibility with `MakeRawVisitInfo`.

    Returns
    -------
    visitInfo : `lsst.afw.image.VisitInfo`
        `~lsst.afw.image.VisitInfo` derived from the header using a
        `~astro_metadata_translator.MetadataTranslator`.
    """
    obsInfo = ObservationInfo(md, translator_class=self.metadataTranslator)

    if self.doStripHeader:
        # Strip all the cards out that were used
        for c in obsInfo.cards_used:
            del md[c]

    return self.observationInfo2visitInfo(obsInfo, log=self.log)
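# --- Hedged usage sketch (not from the source) ---
# The __call__ above appears to belong to lsst.obs.base.MakeRawVisitInfoViaObsInfo
# (it delegates to observationInfo2visitInfo, used elsewhere in this collection).
# The sketch below shows how such a callable might be driven; the raw file path is
# a placeholder, and setting doStripHeader as an instance attribute is an assumption
# inferred from the code above rather than a documented constructor argument.
from lsst.afw.fits import readMetadata
from lsst.obs.base import MakeRawVisitInfoViaObsInfo

md = readMetadata("raw.fits")                 # placeholder path
makeVisitInfo = MakeRawVisitInfoViaObsInfo()
makeVisitInfo.doStripHeader = True            # assumption: enables the card stripping shown above
visitInfo = makeVisitInfo(md)                 # cards used in translation are deleted from md
print(visitInfo.getExposureTime())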
def std_raw(self, item, dataId, filter=True):
    """Standardize a raw dataset by converting it to an
    `~lsst.afw.image.Exposure` instead of an `~lsst.afw.image.Image`."""
    exp = self._standardizeExposure(self.exposures['raw'], item, dataId, trimmed=False,
                                    setVisitInfo=False,  # it's already set, and the metadata's stripped
                                    filter=False)
    if filter:
        obsInfo = ObservationInfo(exp.getMetadata(), translator_class=self.translatorClass)
        try:
            filt = afwImage.Filter(obsInfo.physical_filter)
        except LookupError:
            unknownName = "UNKNOWN"
            logger = lsst.log.Log.getLogger("LsstCamMapper")
            logger.warn('Unknown physical_filter "%s" for %s %s; replacing with "%s"',
                        obsInfo.physical_filter, obsInfo.observation_id,
                        obsInfo.detector_unique_name, unknownName)
            filt = afwImage.Filter(unknownName)
        exp.setFilter(filt)

    return exp
def _getAltAzZenithsFromSeqNum(butler, dayObs, seqNumList):
    """Get the alt, az and zenith angle for the seqNums of a given dayObs.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The butler to query.
    dayObs : `int`
        The dayObs.
    seqNumList : `list` of `int`
        The seqNums for which to return the alt, az and zenith angles.

    Returns
    -------
    azimuths : `list` of `float`
        List of the azimuths for each seqNum.
    elevations : `list` of `float`
        List of the elevations for each seqNum.
    zeniths : `list` of `float`
        List of the zenith angles for each seqNum.
    """
    azimuths, elevations, zeniths = [], [], []
    for seqNum in seqNumList:
        md = butler.get('raw.metadata', day_obs=dayObs, seq_num=seqNum, detector=0)
        obsInfo = ObservationInfo(md)
        alt = obsInfo.altaz_begin.alt.value
        az = obsInfo.altaz_begin.az.value
        elevations.append(alt)
        zeniths.append(90 - alt)
        azimuths.append(az)
    return azimuths, elevations, zeniths
def assertObservationInfo(self, header, check_wcs=True, wcs_params=None, **kwargs):  # noqa: N802
    """Check contents of an ObservationInfo.

    Parameters
    ----------
    header : `dict`-like
        Header to be checked.
    check_wcs : `bool`, optional
        Check the consistency of the RA/Dec and AltAz values.  Checks are
        automatically disabled if the translated header does not appear
        to be "science".
    wcs_params : `dict`, optional
        Parameters to pass to `assertCoordinatesConsistent`.
    kwargs : `dict`
        Keys matching `ObservationInfo` properties with values to be
        tested.

    Raises
    ------
    AssertionError
        A value in the ObservationInfo derived from the file is
        inconsistent.
    """
    # For testing we force pedantic mode since we are in charge
    # of all the translations
    obsinfo = ObservationInfo(header, pedantic=True)

    # Check that we can pickle and get back the same properties
    newinfo = pickle.loads(pickle.dumps(obsinfo))
    self.assertEqual(obsinfo, newinfo)

    # Check the properties
    for property, expected in kwargs.items():
        calculated = getattr(obsinfo, property)
        msg = f"Comparing property {property}"
        if isinstance(expected, u.Quantity):
            calculated = calculated.to_value(unit=expected.unit)
            expected = expected.to_value()
            self.assertAlmostEqual(calculated, expected, msg=msg)
        elif isinstance(calculated, u.Quantity):
            # Only happens if the test is not a quantity when it should be
            self.fail(f"Expected {expected!r} for property {property} but got Quantity '{calculated}'")
        elif isinstance(expected, float):
            self.assertAlmostEqual(calculated, expected, msg=msg)
        else:
            self.assertEqual(calculated, expected, msg=msg)

    # Check the WCS consistency
    if check_wcs and obsinfo.observation_type == "science":
        if wcs_params is None:
            wcs_params = {}
        self.assertCoordinatesConsistent(obsinfo, **wcs_params)
def _calculate_dataset_info(self, header, filename):
    """Calculate a RawFileDatasetInfo from the supplied information.

    Parameters
    ----------
    header : `Mapping`
        Header from the dataset.
    filename : `str`
        Filename to use for error messages.

    Returns
    -------
    dataset : `RawFileDatasetInfo`
        The dataId and observation information associated with this
        dataset.
    """
    # To ensure we aren't slowed down for no reason, explicitly
    # list here the properties we need for the schema.
    # Use a dict with boolean values, where True indicates
    # that it is required that we calculate this property.
    ingest_subset = {
        "altaz_begin": False,
        "boresight_rotation_coord": False,
        "boresight_rotation_angle": False,
        "dark_time": False,
        "datetime_begin": True,
        "datetime_end": True,
        "detector_num": True,
        "exposure_group": False,
        "exposure_id": True,
        "exposure_time": True,
        "instrument": True,
        "tracking_radec": False,
        "object": False,
        "observation_counter": False,
        "observation_id": True,
        "observation_reason": False,
        "observation_type": True,
        "observing_day": False,
        "physical_filter": True,
        "science_program": False,
        "visit_id": False,
    }

    obsInfo = ObservationInfo(header, pedantic=False, filename=filename,
                              required={k for k in ingest_subset if ingest_subset[k]},
                              subset=set(ingest_subset))

    dataId = DataCoordinate.standardize(instrument=obsInfo.instrument,
                                        exposure=obsInfo.exposure_id,
                                        detector=obsInfo.detector_num,
                                        universe=self.universe)

    return RawFileDatasetInfo(obsInfo=obsInfo, dataId=dataId)
def observationInfo(self):
    """The `~astro_metadata_translator.ObservationInfo` extracted from
    this file's metadata (`~astro_metadata_translator.ObservationInfo`,
    read-only).
    """
    if self._observationInfo is None:
        self._observationInfo = ObservationInfo(self.metadata,
                                                translator_class=self.translatorClass)
    return self._observationInfo
def test_translator(self):
    header = self.header

    # Specify a translation class
    with self.assertWarns(UserWarning):
        # Since the translator is incomplete it should issue warnings
        v1 = ObservationInfo(header, translator_class=InstrumentTestTranslator)
    self.assertEqual(v1.instrument, "SCUBA_test")
    self.assertEqual(v1.telescope, "LSST")
    self.assertEqual(v1.exposure_id, 22)
    self.assertIsInstance(v1.exposure_id, int)
    self.assertEqual(v1.detector_name, "76")
    self.assertEqual(v1.relative_humidity, 55.0)
    self.assertIsInstance(v1.relative_humidity, float)
    self.assertEqual(v1.physical_filter, "76_55")

    # Now automated class
    with self.assertWarns(UserWarning):
        # Since the translator is incomplete it should issue warnings
        v1 = ObservationInfo(header)
    self.assertEqual(v1.instrument, "SCUBA_test")
    self.assertEqual(v1.telescope, "LSST")

    location = v1.location.to_geodetic()
    self.assertAlmostEqual(location.height.to("m").to_value(), 4123.0, places=1)

    # Check that headers have been removed
    new_hdr = v1.stripped_header()
    self.assertNotIn("INSTRUME", new_hdr)
    self.assertNotIn("OBSGEO-X", new_hdr)
    self.assertIn("TELESCOP", new_hdr)

    # Check the list of cards that were used
    used = v1.cards_used
    self.assertIn("INSTRUME", used)
    self.assertIn("OBSGEO-Y", used)
    self.assertNotIn("TELESCOP", used)

    # Stringification
    summary = str(v1)
    self.assertIn("datetime_begin", summary)
def observationInfo(self):
    """The `~astro_metadata_translator.ObservationInfo` extracted from
    this file's metadata (`~astro_metadata_translator.ObservationInfo`,
    read-only).
    """
    if self._observationInfo is None:
        location = self.fileDescriptor.location
        path = location.path if location is not None else None
        self._observationInfo = ObservationInfo(self.metadata,
                                                translator_class=self.translatorClass,
                                                filename=path)
    return self._observationInfo
def makeFilter(self, metadata):
    obsInfo = ObservationInfo(metadata, translator_class=HscTranslator)
    # For historical reasons we need to return a short, lowercase filter
    # name that is neither a physical_filter nor an abstract_filter in Gen3
    # or a filter data ID value in Gen2.
    # We'll suck that out of the definitions used to construct filters
    # for HSC in Gen2.  This should all get cleaned up in RFC-541.
    for d in HSC_FILTER_DEFINITIONS:
        if obsInfo.physical_filter == d["name"] or obsInfo.physical_filter in d["alias"]:
            return Filter(d["name"], force=True)
    return Filter(obsInfo.physical_filter, force=True)
def readObsInfo(self):
    """Load unique ObservationInfo objects and filter associations from
    all scanned repositories.
    """
    for repo in self.scanned.values():
        config = self.config["mappers", repo.MapperClass.__name__, "VisitInfo"]
        instrumentObsInfo = self.obsInfo.setdefault(repo.MapperClass.__name__, {})
        datasets = repo.datasets.get(config["DatasetType"], {})
        for dataset in datasets.values():
            obsInfoId = tuple(dataset.dataId[k] for k in config["uniqueKeys"])
            if obsInfoId in instrumentObsInfo:
                continue
            md = readMetadata(dataset.fullPath)
            filt = repo.mapper.queryMetadata(config["DatasetType"], ("filter",), dataset.dataId)[0][0]
            instrumentObsInfo[obsInfoId] = (ObservationInfo(md), filt)
def _scrapeData(self, dayObs):
    """Load data into self.data, skipping seqNums that are already present.

    Don't call directly: the rebuild() function zeros out the data when
    it's a new dayObs.
    """
    seqNums = getSeqNumsForDayObs(self.butler, dayObs)
    for seqNum in sorted(seqNums):
        if seqNum in self.data.keys():
            continue
        dataId = {'day_obs': dayObs, 'seq_num': seqNum, 'detector': 0}
        md = self.butler.get('raw.metadata', dataId)
        self.data[seqNum] = md.toDict()
        self.data[seqNum]['ObservationInfo'] = ObservationInfo(md)
    print(f"Loaded data for seqNums {sorted(seqNums)[0]} to {sorted(seqNums)[-1]}")
def read_file(file, failed):
    print(f"Analyzing {file}...", file=sys.stderr)
    try:
        md = read_metadata(file)
        if args.dumphdr:
            print(yaml.dump(md))
            return
        obs_info = ObservationInfo(md, pedantic=True)
        if not args.quiet:
            print(f"{obs_info}")
    except Exception as e:
        if args.traceback:
            traceback.print_exc(file=sys.stdout)
        else:
            print(repr(e))
        failed.append(file)
def testObservationInfo2VisitInfo(self):
    with self.assertWarns(UserWarning):
        obsInfo = ObservationInfo(self.header, translator_class=NewTranslator)

    # No log specified so no log message should appear
    visitInfo = MakeRawVisitInfoViaObsInfo.observationInfo2visitInfo(obsInfo)
    self.assertIsInstance(visitInfo, lsst.afw.image.VisitInfo)

    self.assertAlmostEqual(visitInfo.getExposureTime(), self.exposure_time.to_value("s"))
    self.assertEqual(visitInfo.getExposureId(), self.exposure_id)
    self.assertEqual(visitInfo.getDate(),
                     DateTime("2001-01-02T03:04:06.123456789Z", DateTime.UTC))
def getInfoFromMetadata(self, md, info=None):
    """Attempt to pull the desired information out of the header.

    Parameters
    ----------
    md : `lsst.daf.base.PropertyList`
        FITS header.
    info : `dict`, optional
        File properties, to be updated by this routine. If `None` it
        will be created.

    Returns
    -------
    info : `dict`
        Translated information from the metadata. Updated form of the
        input parameter.

    Notes
    -----
    This is done through two mechanisms:

    * translation: a property is set directly from the relevant header
      keyword.
    * translator: a property is set with the result of calling a method.

    The translator methods receive the header metadata and should return
    the appropriate value, or None if the value cannot be determined.

    This implementation constructs an
    `~astro_metadata_translator.ObservationInfo` object prior to calling
    each translator method, making the translated information available
    through the ``observationInfo`` attribute.
    """
    # Always calculate a new ObservationInfo since getInfo calls
    # this method repeatedly for each header.
    self.observationInfo = ObservationInfo(md, translator_class=self._translatorClass,
                                           pedantic=False)

    info = super().getInfoFromMetadata(md, info)

    # Ensure that the translated ObservationInfo is cleared.
    # This avoids possible confusion.
    self.observationInfo = None
    return info
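# --- Minimal sketch (assumptions noted, not from the source) ---
# Given the Notes above, a "translator" method on a subclass can read from the
# temporarily attached self.observationInfo instead of re-parsing the header.
# The method name translate_expTime and its surrounding class are hypothetical;
# the sketch relies only on getInfoFromMetadata (above) setting
# self.observationInfo before translator methods are invoked.
def translate_expTime(self, md):
    # Ignore the raw header; use the already-translated value and convert
    # the astropy Quantity to seconds.
    if self.observationInfo.exposure_time is None:
        return None
    return self.observationInfo.exposure_time.to_value("s")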
def ensureDimensions(self, file):
    """Extract metadata from a raw file and add Exposure and Visit
    Dimension entries.

    Any needed Instrument, Detector, and PhysicalFilter Dimension entries
    must exist in the Registry before `run` is called.

    Parameters
    ----------
    file : `str` or path-like object
        Absolute path to the file to be ingested.

    Returns
    -------
    headers : `list` of `~lsst.daf.base.PropertyList`
        Result of calling `readHeaders`.
    dataId : `DataId`
        Data ID dictionary, as returned by `extractDataId`.
    """
    headers = self.readHeaders(file)
    obsInfo = ObservationInfo(headers[0])

    # Extract a DataId that covers all of self.dimensions.
    fullDataId = self.extractDataId(file, headers, obsInfo=obsInfo)

    for dimension in self.dimensions:
        dimensionDataId = DataId(fullDataId, dimension=dimension)
        if dimensionDataId not in self.dimensionEntriesDone[dimension]:
            # Next look in the Registry
            dimensionEntryDict = self.butler.registry.findDimensionEntry(dimension, dimensionDataId)
            if dimensionEntryDict is None:
                if dimension.name in ("Visit", "Exposure"):
                    # Add the entry into the Registry.
                    self.butler.registry.addDimensionEntry(dimension, dimensionDataId)
                else:
                    raise LookupError(f"Entry for {dimension.name} with ID {dimensionDataId} not found; "
                                      f"must be present in Registry prior to ingest.")
            # Record that we've handled this entry.
            self.dimensionEntriesDone[dimension].add(dimensionDataId)

    return headers, fullDataId
def attachRawWcsFromBoresight(exposure, dataIdForErrMsg=None):
    """Attach a WCS by extracting boresight, rotation, and camera geometry
    from an Exposure.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        Image object with attached metadata and detector components.

    Returns
    -------
    attached : `bool`
        If True, a WCS component was successfully created and attached to
        ``exposure``.
    """
    md = exposure.getMetadata()
    # Use the generic version since we do not have a mapper available to
    # tell us a specific translator to use.
    obsInfo = ObservationInfo(md)
    visitInfo = MakeRawVisitInfoViaObsInfo.observationInfo2visitInfo(obsInfo, log=logger)
    exposure.getInfo().setVisitInfo(visitInfo)

    # LATISS (and likely others) need flipping, DC2 etc do not
    flipX = False
    if obsInfo.instrument in ("LATISS", ):
        flipX = True

    if visitInfo.getBoresightRaDec().isFinite():
        exposure.setWcs(createInitialSkyWcs(visitInfo, exposure.getDetector(), flipX=flipX))
        return True

    if obsInfo.observation_type == "science":
        logger.warn("Unable to set WCS from header as RA/Dec/Angle are unavailable%s",
                    ("" if dataIdForErrMsg is None else " for dataId %s" % dataIdForErrMsg))

    return False
def test_corrections(self):
    """Apply corrections before translation."""
    header = self.header

    # Specify a translation class
    with self.assertWarns(UserWarning):
        # Since the translator is incomplete it should issue warnings
        v1 = ObservationInfo(header, translator_class=InstrumentTestTranslator,
                             search_path=[os.path.join(TESTDIR, "data", "corrections")])

    # These values should match the expected translation
    self.assertEqual(v1.instrument, "SCUBA_test")
    self.assertEqual(v1.detector_name, "76")
    self.assertEqual(v1.relative_humidity, 55.0)
    self.assertIsInstance(v1.relative_humidity, float)
    self.assertEqual(v1.physical_filter, "76_55")

    # These two should be the "corrected" values
    self.assertEqual(v1.telescope, "AuxTel")
    self.assertEqual(v1.exposure_id, 42)
def std_raw(self, item, dataId, filter=True):
    """Standardize a raw dataset by converting it to an
    `~lsst.afw.image.Exposure` instead of an `~lsst.afw.image.Image`."""
    exp = self._standardizeExposure(self.exposures['raw'], item, dataId, trimmed=False,
                                    setVisitInfo=False,  # it's already set, and the metadata's stripped
                                    setExposureId=False, filter=False)
    if filter:
        obsInfo = ObservationInfo(exp.getMetadata(), translator_class=self.translatorClass)
        band = self.filterDefinitions.physical_to_band[obsInfo.physical_filter]
        filt = afwImage.FilterLabel(physical=obsInfo.physical_filter, band=band)
        exp.setFilterLabel(filt)
    return exp
def test_failures(self):
    header = {}

    with self.assertRaises(TypeError):
        ObservationInfo(header, translator_class=ObservationInfo)

    with self.assertLogs("astro_metadata_translator"):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=False)

    with self.assertRaises(KeyError):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=True)

    with self.assertLogs("astro_metadata_translator"):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=False,
                            filename="testfile1")

    with self.assertRaises(KeyError):
        with self.assertWarns(UserWarning):
            ObservationInfo(header, translator_class=InstrumentTestTranslator, pedantic=True,
                            filename="testfile2")

    with self.assertRaises(NotImplementedError):
        with self.assertLogs("astro_metadata_translator", level="WARN"):
            ObservationInfo(header, translator_class=MissingMethodsTranslator)
def assertObservationInfo(self, header, filename=None, check_wcs=True,  # noqa: N802
                          wcs_params=None, **kwargs):
    """Check contents of an ObservationInfo.

    Parameters
    ----------
    header : `dict`-like
        Header to be checked.
    filename : `str`, optional
        Name of the file associated with this header, if known.
    check_wcs : `bool`, optional
        Check the consistency of the RA/Dec and AltAz values.  Checks are
        automatically disabled if the translated header does not appear
        to be "science".
    wcs_params : `dict`, optional
        Parameters to pass to `assertCoordinatesConsistent`.
    kwargs : `dict`
        Keys matching `ObservationInfo` properties with values to be
        tested.

    Raises
    ------
    AssertionError
        A value in the ObservationInfo derived from the file is
        inconsistent.
    """
    # For testing we force pedantic mode since we are in charge
    # of all the translations
    obsinfo = ObservationInfo(header, pedantic=True, filename=filename)
    translator = obsinfo.translator_class_name

    # Check that we can pickle and get back the same properties
    newinfo = pickle.loads(pickle.dumps(obsinfo))
    self.assertEqual(obsinfo, newinfo)

    # Check the properties
    for property, expected in kwargs.items():
        calculated = getattr(obsinfo, property)
        msg = f"Comparing property {property} using translator {translator}"
        if isinstance(expected, u.Quantity) and calculated is not None:
            calculated = calculated.to_value(unit=expected.unit)
            expected = expected.to_value()
            self.assertAlmostEqual(calculated, expected, msg=msg)
        elif isinstance(calculated, u.Quantity):
            # Only happens if the test is not a quantity when it should be
            self.fail(f"Expected {expected!r} but got Quantity '{calculated}': {msg}")
        elif isinstance(expected, float) and calculated is not None:
            self.assertAlmostEqual(calculated, expected, msg=msg)
        else:
            self.assertEqual(calculated, expected, msg=msg)

    # Date comparison error reports benefit from specifying ISO format.
    # Generate a new Time object at fixed precision to work around the
    # fact that (as of astropy 3.1) adding 0.0 seconds to a Time results
    # in a new Time object that is a few picoseconds in the past.
    def _format_date_for_testing(date):
        if date is not None:
            date.format = "isot"
            date.precision = 9
            date = Time(str(date), scale=date.scale, format="isot")
        return date

    datetime_begin = _format_date_for_testing(obsinfo.datetime_begin)
    datetime_end = _format_date_for_testing(obsinfo.datetime_end)

    # Check that dates are defined and that the end is the same as or
    # later than the beginning
    self.assertLessEqual(datetime_begin, datetime_end)

    # Check that the exposure time does not extend beyond datetime_end
    self.assertLessEqual(obsinfo.datetime_begin + obsinfo.exposure_time, obsinfo.datetime_end)

    # Check the WCS consistency
    if check_wcs and obsinfo.observation_type == "science":
        if wcs_params is None:
            wcs_params = {}
        self.assertCoordinatesConsistent(obsinfo, **wcs_params)
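# --- Illustrative test sketch (assumptions noted, not from the source) ---
# A per-instrument test would typically mix in the helper class defining
# assertObservationInfo above and call it with a test header and expected
# values. The mixin import name (MetadataAssertionHelper), the YAML file
# name, and the expected values below are assumptions/placeholders.
import unittest

import astropy.units as u

from astro_metadata_translator.tests import MetadataAssertionHelper, read_test_file


class ExampleTranslationTestCase(MetadataAssertionHelper, unittest.TestCase):
    datadir = "data"   # placeholder directory of YAML test headers

    def test_example_header(self):
        header = read_test_file("example-header.yaml", dir=self.datadir)
        self.assertObservationInfo(header,
                                   filename="example-header.yaml",
                                   telescope="LSST",
                                   exposure_time=30.0 * u.s,   # exercised via the Quantity branch
                                   observation_type="science")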
def generateStatsTextboxContent(self, section, doPrint=True):
    x, y = self.qfmResult.brightestObjCentroid
    exptime = self.exp.getInfo().getVisitInfo().getExposureTime()

    info = self.exp.getInfo()
    vi = info.getVisitInfo()
    fullFilterString = info.getFilterLabel().physicalLabel
    filt = fullFilterString.split(FILTER_DELIMITER)[0]
    grating = fullFilterString.split(FILTER_DELIMITER)[1]

    airmass = vi.getBoresightAirmass()
    rotangle = vi.getBoresightRotAngle().asDegrees()

    azAlt = vi.getBoresightAzAlt()
    az = azAlt[0].asDegrees()
    el = azAlt[1].asDegrees()

    md = self.exp.getMetadata()
    obsInfo = ObservationInfo(md, subset={'object'})
    obj = obsInfo.object

    lines = []

    if section == 0:
        lines.append("----- Star stats -----")
        lines.append(f"Star centroid @ {x:.0f}, {y:.0f}")
        lines.append(f"Star max pixel = {self.starPeakFlux:,.0f} ADU")
        lines.append(f"Star Ap25 flux = {self.qfmResult.brightestObjApFlux25:,.0f} ADU")
        lines.extend(["", ""])  # section break
        return '\n'.join([line for line in lines])

    if section == 1:
        lines.append("------ Image stats ---------")
        imageMedian = np.median(self.exp.image.array)
        lines.append(f"Image median = {imageMedian:.2f} ADU")
        lines.append(f"Exposure time = {exptime:.2f} s")
        lines.extend(["", ""])  # section break
        return '\n'.join([line for line in lines])

    if section == 2:
        lines.append("------- Rate stats ---------")
        lines.append(f"Star max pixel = {self.starPeakFlux/exptime:,.0f} ADU/s")
        lines.append(f"Spectrum continuum = {self.continuumFlux98/exptime:,.1f} ADU/s")
        lines.extend(["", ""])  # section break
        return '\n'.join([line for line in lines])

    if section == 3:
        lines.append("----- Observation info -----")
        lines.append(f"object = {obj}")
        lines.append(f"filter = {filt}")
        lines.append(f"grating = {grating}")
        lines.append(f"rotpa = {rotangle:.1f}")
        lines.append(f"az = {az:.1f}")
        lines.append(f"el = {el:.1f}")
        lines.append(f"airmass = {airmass:.3f}")
        return '\n'.join([line for line in lines])

    if section == -1:  # special -1 for debug
        lines.append("---------- Debug -----------")
        lines.append(f"spectrum bbox: {self.spectrumbbox}")
        lines.append(f"Good range = {self.goodSpectrumMinY},{self.goodSpectrumMaxY}")
        return '\n'.join([line for line in lines])

    return
def read_file(file, hdrnum, print_trace, outstream=sys.stdout, errstream=sys.stderr,
              output_mode="verbose", write_heading=False):
    """Read the specified file and process it.

    Parameters
    ----------
    file : `str`
        The file from which the header is to be read.
    hdrnum : `int`
        The HDU number to read.  The primary header is always read and
        merged with the header from this HDU.
    print_trace : `bool`
        If there is an error reading the file and this parameter is `True`,
        a full traceback of the exception will be reported.  If `False`,
        a one-line summary of the error condition is printed.
    outstream : `io.StringIO`, optional
        Output stream to use for standard messages.  Defaults to
        `sys.stdout`.
    errstream : `io.StringIO`, optional
        Stream to send messages that would normally be sent to standard
        error.  Defaults to `sys.stderr`.
    output_mode : `str`, optional
        Output mode to use.  Must be one of "verbose", "none", "table",
        "yaml", or "fixed".  "yaml" and "fixed" can be modified with a
        "native" suffix to indicate that the output should be a
        representation of the native object type representing the header
        (which can be a PropertyList or an Astropy header).  Without this
        modifier, headers will be dumped as a simple `dict`.  "auto" is
        not allowed by this point.
    write_heading : `bool`, optional
        If `True` and in table mode, write a table heading out before
        writing the content.

    Returns
    -------
    success : `bool`
        `True` if the file was handled successfully, `False` if the file
        could not be processed.
    """
    if output_mode not in OUTPUT_MODES:
        raise ValueError(f"Output mode of '{output_mode}' is not understood.")
    if output_mode == "auto":
        raise ValueError("Output mode can not be 'auto' here.")

    # This gets in the way in tabular mode
    if output_mode != "table":
        print(f"Analyzing {file}...", file=errstream)

    try:
        if file.endswith(".yaml"):
            md = read_test_file(file)
            if hdrnum != 0:
                # YAML can't have HDUs
                hdrnum = 0
        else:
            md = read_metadata(file, 0)
        if md is None:
            print(f"Unable to open file {file}", file=errstream)
            return False
        if hdrnum != 0:
            mdn = read_metadata(file, int(hdrnum))
            # Astropy does not allow append mode since it does not
            # convert lists to multiple cards.  Overwrite for now.
            if mdn is not None:
                md = merge_headers([md, mdn], mode="overwrite")
            else:
                print(f"HDU {hdrnum} was not found. Ignoring request.", file=errstream)

        if output_mode.endswith("native"):
            # Strip "native" and don't change the type of md
            output_mode = output_mode[:-len("native")]
        else:
            # Rewrite md as a simple dict for output
            md = {k: v for k, v in md.items()}

        if output_mode in ("yaml", "fixed"):
            if output_mode == "fixed":
                fix_header(md, filename=file)

            # The header should be written out in the insertion order
            print(yaml.dump(md, sort_keys=False), file=outstream)
            return True

        obs_info = ObservationInfo(md, pedantic=True, filename=file)
        if output_mode == "table":
            columns = ["{:{fmt}}".format(getattr(obs_info, c["attr"]), fmt=c["format"])
                       for c in TABLE_COLUMNS]

            if write_heading:
                # Construct headings of the same width as the items
                # we have calculated.  Doing this means we don't have to
                # work out for ourselves how many characters will be used
                # for non-strings (especially Quantity)
                headings = []
                separators = []
                for thiscol, defn in zip(columns, TABLE_COLUMNS):
                    width = len(thiscol)
                    headings.append("{:{w}.{w}}".format(defn["label"], w=width))
                    separators.append("-" * width)
                print(" ".join(headings), file=outstream)
                print(" ".join(separators), file=outstream)

            row = " ".join(columns)
            print(row, file=outstream)
        elif output_mode == "verbose":
            print(f"{obs_info}", file=outstream)
        elif output_mode == "none":
            pass
        else:
            raise RuntimeError(f"Output mode of '{output_mode}' not recognized but should be known.")
    except Exception as e:
        if print_trace:
            traceback.print_exc(file=outstream)
        else:
            print(repr(e), file=outstream)
        return False
    return True
def plotMountTracking(dataId, butler, client, figure, saveFilename, logger):
    """Query the EFD for a given exposure and check if there were tracking
    errors.

    Parameters
    ----------
    dataId : `dict`
        The dataId for which to plot the mount torques.
    butler : `lsst.daf.butler.Butler`
        The butler to use to retrieve the image metadata.
    client : `lsst_efd_client.Client`
        The EFD client to retrieve the mount torques.
    figure : `matplotlib.Figure`
        A matplotlib figure to re-use. Necessary to pass this in to prevent
        an ever-growing figure count and the ensuing memory leak.
    saveFilename : `str`
        Full path and filename to save the plot to.
    logger : `logging.Logger`
        The logger.

    Returns
    -------
    plotted : `bool`
        True if the dataId was plotted, False if it was skipped.
    """
    # lsst-efd-client is not a required import at the top here, but is
    # implicitly required as a client is passed into this function so is not
    # rechecked here.
    start = time.time()

    expRecord = bu.getExpRecordFromDataId(butler, dataId)
    dayString = dayObsIntToString(expRecord.day_obs)
    seqNumString = str(expRecord.seq_num)
    dataIdString = f"{dayString} - seqNum {seqNumString}"

    imgType = expRecord.observation_type.upper()
    tStart = expRecord.timespan.begin.tai.to_value("isot")
    tEnd = expRecord.timespan.end.tai.to_value("isot")
    elevation = 90 - expRecord.zenith_angle
    exptime = expRecord.exposure_time

    # TODO: DM-33859 remove this once it can be got from the expRecord
    md = butler.get('raw.metadata', dataId, detector=0)
    obsInfo = ObservationInfo(md)
    azimuth = obsInfo.altaz_begin.az.value
    logger.debug(f"dataId={dataIdString}, imgType={imgType}, Times={tStart}, {tEnd}")

    if imgType not in GOOD_IMAGE_TYPES:
        logger.info(f'Skipping image type {imgType} for {dataIdString}')
        return False
    if exptime < 1.99:
        logger.info('Skipping sub 2s exposure')
        return False

    end = time.time()
    elapsed = end - start
    logger.debug(f"Elapsed time for butler query = {elapsed}")

    start = time.time()
    # Time base in the EFD is still a big mess.  Although these times are in
    # UTC, it is necessary to tell the code they are in TAI.  Then it is
    # necessary to tell merge_packed_time_series to use UTC.
    # After doing all of this, there is still a 2 second offset,
    # which is discussed in JIRA ticket DM-29243, but not understood.
    t_start = Time(tStart, scale='tai')
    t_end = Time(tEnd, scale='tai')
    logger.debug(f"Tstart = {t_start.isot}, Tend = {t_end.isot}")

    mount_position = _getEfdData(client, "lsst.sal.ATMCS.mount_AzEl_Encoders", t_start, t_end)
    nasmyth_position = _getEfdData(client, "lsst.sal.ATMCS.mount_Nasmyth_Encoders", t_start, t_end)
    torques = _getEfdData(client, "lsst.sal.ATMCS.measuredTorque", t_start, t_end)
    logger.debug(f"Length of time series = {len(mount_position)}")

    az = mpts(mount_position, 'azimuthCalculatedAngle', stride=1)
    el = mpts(mount_position, 'elevationCalculatedAngle', stride=1)
    rot = mpts(nasmyth_position, 'nasmyth2CalculatedAngle', stride=1)
    az_torque_1 = mpts(torques, 'azimuthMotor1Torque', stride=1)
    az_torque_2 = mpts(torques, 'azimuthMotor2Torque', stride=1)
    el_torque = mpts(torques, 'elevationMotorTorque', stride=1)
    rot_torque = mpts(torques, 'nasmyth2MotorTorque', stride=1)

    end = time.time()
    elapsed = end - start
    logger.debug(f"Elapsed time to get the data = {elapsed}")

    start = time.time()
    # Calculate the tracking errors
    az_vals = np.array(az.values[:, 0])
    el_vals = np.array(el.values[:, 0])
    rot_vals = np.array(rot.values[:, 0])
    times = np.array(az.values[:, 1])
    logger.debug(f"Length of packed time series = {len(az_vals)}")

    # Fit each axis with a linear model
    az_fit = np.polyfit(times, az_vals, 1)
    el_fit = np.polyfit(times, el_vals, 1)
    rot_fit = np.polyfit(times, rot_vals, 1)
    az_model = az_fit[0] * times + az_fit[1]
    el_model = el_fit[0] * times + el_fit[1]
    rot_model = rot_fit[0] * times + rot_fit[1]

    # Errors in arcseconds
    az_error = (az_vals - az_model) * 3600
    el_error = (el_vals - el_model) * 3600
    rot_error = (rot_vals - rot_model) * 3600

    # Calculate RMS
    az_rms = np.sqrt(np.mean(az_error * az_error))
    el_rms = np.sqrt(np.mean(el_error * el_error))
    rot_rms = np.sqrt(np.mean(rot_error * rot_error))

    end = time.time()
    elapsed = end - start
    logger.debug(f"Elapsed time for error calculations = {elapsed}")

    start = time.time()
    # Plotting
    figure.clear()
    title = f"Mount Tracking {dataIdString}, Azimuth = {azimuth:.1f}, Elevation = {elevation:.1f}"
    plt.suptitle(title, fontsize=18)

    # Azimuth axis
    plt.subplot(3, 3, 1)
    ax1 = az['azimuthCalculatedAngle'].plot(legend=True, color='red')
    ax1.set_title("Azimuth axis", fontsize=16)
    ax1.axvline(az.index[0], color="red", linestyle="--")
    ax1.set_xticks([])
    ax1.set_ylabel("Degrees")
    plt.subplot(3, 3, 4)
    plt.plot(times, az_error, color='red')
    plt.title(f"Azimuth RMS error = {az_rms:.2f} arcseconds")
    plt.ylim(-10.0, 10.0)
    plt.xticks([])
    plt.ylabel("Arcseconds")
    plt.subplot(3, 3, 7)
    ax7 = az_torque_1['azimuthMotor1Torque'].plot(legend=True, color='blue')
    ax7 = az_torque_2['azimuthMotor2Torque'].plot(legend=True, color='green')
    ax7.axvline(az.index[0], color="red", linestyle="--")
    ax7.set_ylabel("Torque (motor current in amps)")

    # Elevation axis
    plt.subplot(3, 3, 2)
    ax2 = el['elevationCalculatedAngle'].plot(legend=True, color='green')
    ax2.set_title("Elevation axis", fontsize=16)
    ax2.axvline(az.index[0], color="red", linestyle="--")
    ax2.set_xticks([])
    plt.subplot(3, 3, 5)
    plt.plot(times, el_error, color='green')
    plt.title(f"Elevation RMS error = {el_rms:.2f} arcseconds")
    plt.ylim(-10.0, 10.0)
    plt.xticks([])
    plt.subplot(3, 3, 8)
    ax8 = el_torque['elevationMotorTorque'].plot(legend=True, color='blue')
    ax8.axvline(az.index[0], color="red", linestyle="--")
    ax8.set_ylabel("Torque (motor current in amps)")

    # Nasmyth2 rotator axis
    plt.subplot(3, 3, 3)
    ax3 = rot['nasmyth2CalculatedAngle'].plot(legend=True, color='blue')
    ax3.set_title("Nasmyth2 axis", fontsize=16)
    ax3.axvline(az.index[0], color="red", linestyle="--")
    ax3.set_xticks([])
    plt.subplot(3, 3, 6)
    plt.plot(times, rot_error, color='blue')
    plt.title(f"Nasmyth RMS error = {rot_rms:.2f} arcseconds")
    plt.ylim(-100.0, 100.0)
    plt.subplot(3, 3, 9)
    ax9 = rot_torque['nasmyth2MotorTorque'].plot(legend=True, color='blue')
    ax9.axvline(az.index[0], color="red", linestyle="--")
    ax9.set_ylabel("Torque (motor current in amps)")
    plt.savefig(saveFilename)

    end = time.time()
    elapsed = end - start
    logger.debug(f"Elapsed time for plots = {elapsed}")

    return True