class FocalPlaneBackgroundConfig(Config):
    """Configuration for FocalPlaneBackground

    Note that `xSize` and `ySize` are floating-point values, as the focal
    plane frame is usually defined in units of microns or millimetres rather
    than pixels. As such, their values will need to be revised according to
    each particular camera. For this reason, no defaults are set for those.
    """
    # Superpixel dimensions in focal-plane units; camera-specific, hence no default.
    xSize = Field(doc="Bin size in x", dtype=float)
    ySize = Field(doc="Bin size in y", dtype=float)
    minFrac = Field(doc="Minimum fraction of bin size for good measurement",
                    dtype=float, default=0.1)
    mask = ListField(doc="Mask planes to treat as bad", dtype=str,
                     default=["BAD", "SAT", "INTRP", "DETECTED",
                              "DETECTED_NEGATIVE", "EDGE", "NO_DATA"])
    interpolation = ChoiceField(
        dtype=str,
        doc="how to interpolate the background values. This maps to an enum; see afw::math::Background",
        default="AKIMA_SPLINE",
        optional=True,
        allowed={
            "CONSTANT": "Use a single constant value",
            "LINEAR": "Use linear interpolation",
            "NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
            "AKIMA_SPLINE": "higher-level nonlinear spline that is more robust to outliers",
            "NONE": "No background estimation is to be attempted",
        },
    )
    doSmooth = Field(doc="Do smoothing?", dtype=bool, default=False)
    smoothScale = Field(doc="Smoothing scale, as a multiple of the bin size",
                        dtype=float, default=2.0)
    binning = Field(doc="Binning to use for CCD background model (pixels)",
                    dtype=int, default=64)
def makeTransferChoiceField(doc="How to transfer files (None for no transfer).", default=None):
    """Create a Config field with options for how to transfer files between
    data repositories.

    The allowed options for the field are exactly those supported by
    `lsst.daf.butler.Datastore.ingest`.

    Parameters
    ----------
    doc : `str`
        Documentation for the configuration field.
    default : `str`, optional
        Default transfer mode for the field; `None` (the default) means
        no transfer.

    Returns
    -------
    field : `lsst.pex.config.ChoiceField`
        Configuration field.
    """
    return ChoiceField(
        doc=doc,
        dtype=str,
        allowed={"move": "move",
                 "copy": "copy",
                 "hardlink": "hard link",
                 "symlink": "symbolic (soft) link"},
        optional=True,
        default=default
    )
class RawIngestConfig(Config):
    """Configuration for ingesting raw files into a Butler repository."""
    transfer = ChoiceField(
        "How to transfer files (None for no transfer).",
        dtype=str,
        allowed={"move": "move",
                 "copy": "copy",
                 "hardlink": "hard link",
                 "symlink": "symbolic (soft) link"},
        optional=True,
    )
    conflict = ChoiceField(
        ("What to do if a raw Dataset with the same data ID as an "
         "ingested file already exists in the Butler's Collection."),
        dtype=str,
        allowed={
            "ignore": ("Do not add the new file to the Collection. If "
                       "'stash' is not None, the new file will be "
                       "ingested into the stash Collection instead."),
            "fail": ("Raise RuntimeError if a conflict is encountered "
                     "(which may then be caught if onError == 'continue')."),
        },
        optional=False,
        default="ignore",
    )
    stash = Field(
        "Name of an alternate Collection to hold Datasets that lose conflicts.",
        dtype=str,
        default=None,
    )
    onError = ChoiceField(
        "What to do if an error (including fatal conflicts) occurs.",
        dtype=str,
        allowed={
            "continue": "Warn and continue with the next file.",
            "break": ("Stop processing immediately, but leave "
                      "already-ingested datasets in the repository."),
            # Fixed typo: "aleady-ingested" -> "already-ingested".
            "rollback": ("Stop processing and attempt to remove already-"
                         "ingested datasets from the repository."),
        },
        optional=False,
        default="continue",
    )
class SkyStatsConfig(Config):
    """Parameters controlling the measurement of sky statistics"""
    # Statistic applied at each grid point when estimating the sky level.
    statistic = ChoiceField(
        dtype=str,
        doc="type of statistic to use for grid points",
        default="MEANCLIP",
        allowed={"MEANCLIP": "clipped mean",
                 "MEAN": "unclipped mean",
                 "MEDIAN": "median"},
    )
    clip = Field(dtype=float, default=3.0, doc="Clipping threshold for background")
    nIter = Field(dtype=int, default=3, doc="Clipping iterations for background")
    mask = ListField(
        dtype=str,
        doc="Mask planes to reject",
        default=["SAT", "DETECTED", "DETECTED_NEGATIVE", "BAD", "NO_DATA"],
    )
class BackgroundConfig(Config):
    """Configuration for background measurement"""
    statistic = ChoiceField(
        dtype=str,
        default="MEANCLIP",
        doc="type of statistic to use for grid points",
        allowed={"MEANCLIP": "clipped mean",
                 "MEAN": "unclipped mean",
                 "MEDIAN": "median"},
    )
    # Superpixel (bin) size in pixels; must be at least 1 in each dimension.
    xBinSize = RangeField(dtype=int, default=32, min=1, doc="Superpixel size in x")
    yBinSize = RangeField(dtype=int, default=32, min=1, doc="Superpixel size in y")
    algorithm = ChoiceField(
        dtype=str,
        default="NATURAL_SPLINE",
        optional=True,
        doc="How to interpolate the background values. "
            "This maps to an enum; see afw::math::Background",
        allowed={
            "CONSTANT": "Use a single constant value",
            "LINEAR": "Use linear interpolation",
            "NATURAL_SPLINE": "cubic spline with zero second derivative at endpoints",
            "AKIMA_SPLINE": "higher-level nonlinear spline that is more robust to outliers",
            "NONE": "No background estimation is to be attempted",
        },
    )
    mask = ListField(
        dtype=str,
        doc="Names of mask planes to ignore while estimating the background",
        default=["SAT", "BAD", "EDGE", "DETECTED", "DETECTED_NEGATIVE", "NO_DATA"],
    )
class CrosstalkConfig(Config):
    """Configuration for intra-CCD crosstalk removal"""
    # Pixels above this level in a source amplifier get the crosstalk mask plane.
    minPixelToMask = Field(
        doc="Set crosstalk mask plane for pixels over this value.",
        dtype=float,
        default=45000,
    )
    crosstalkMaskPlane = Field(
        doc="Name for crosstalk mask plane.",
        dtype=str,
        default="CROSSTALK",
    )
    crosstalkBackgroundMethod = ChoiceField(
        doc="Type of background subtraction to use when applying correction.",
        dtype=str,
        default="None",
        allowed={
            "None": "Do no background subtraction.",
            "AMP": "Subtract amplifier-by-amplifier background levels.",
            "DETECTOR": "Subtract detector level background.",
        },
    )
class CrosstalkConfig(Config):
    """Configuration for intra-detector crosstalk removal."""
    minPixelToMask = Field(
        dtype=float,
        doc="Set crosstalk mask plane for pixels over this value.",
        default=45000
    )
    crosstalkMaskPlane = Field(
        dtype=str,
        doc="Name for crosstalk mask plane.",
        default="CROSSTALK"
    )
    crosstalkBackgroundMethod = ChoiceField(
        dtype=str,
        doc="Type of background subtraction to use when applying correction.",
        default="None",
        allowed={
            "None": "Do no background subtraction.",
            "AMP": "Subtract amplifier-by-amplifier background levels.",
            "DETECTOR": "Subtract detector level background."
        },
    )
    useConfigCoefficients = Field(
        dtype=bool,
        doc="Ignore the detector crosstalk information in favor of CrosstalkConfig values?",
        default=False,
    )
    crosstalkValues = ListField(
        dtype=float,
        doc=("Amplifier-indexed crosstalk coefficients to use. This should be arranged as a 1 x nAmp**2 "
             "list of coefficients, such that when reshaped by crosstalkShape, the result is nAmp x nAmp. "
             "This matrix should be structured so CT * [amp0 amp1 amp2 ...]^T returns the column "
             "vector [corr0 corr1 corr2 ...]^T."),
        default=[0.0],
    )
    crosstalkShape = ListField(
        dtype=int,
        doc="Shape of the coefficient array. This should be equal to [nAmp, nAmp].",
        default=[1],
    )

    def getCrosstalk(self, detector=None):
        """Return a 2-D numpy array of crosstalk coefficients in the proper shape.

        Parameters
        ----------
        detector : `lsst.afw.cameraGeom.detector`
            Detector that is to be crosstalk corrected.

        Returns
        -------
        coeffs : `numpy.ndarray`
            Crosstalk coefficients that can be used to correct the detector.

        Raises
        ------
        RuntimeError
            Raised if no coefficients could be generated from this
            detector/configuration.
        """
        if self.useConfigCoefficients:
            coeffs = np.array(self.crosstalkValues).reshape(self.crosstalkShape)
            if detector is not None:
                # Sanity-check the configured matrix against the detector's
                # amplifier count before handing it to the correction code.
                nAmp = len(detector)
                if coeffs.shape != (nAmp, nAmp):
                    # Fixed typo in message: "coeffients" -> "coefficients".
                    raise RuntimeError("Constructed crosstalk coefficients do not match detector shape. "
                                       f"{coeffs.shape} {nAmp}")
            return coeffs
        elif detector is not None and detector.hasCrosstalk():
            # Assume the detector defines itself consistently.
            return detector.getCrosstalk()
        else:
            raise RuntimeError("Attempted to correct crosstalk without crosstalk coefficients")

    def hasCrosstalk(self, detector=None):
        """Return a boolean indicating if crosstalk coefficients exist.

        Parameters
        ----------
        detector : `lsst.afw.cameraGeom.detector`
            Detector that is to be crosstalk corrected.

        Returns
        -------
        hasCrosstalk : `bool`
            True if this detector/configuration has crosstalk coefficients
            defined.
        """
        if self.useConfigCoefficients and self.crosstalkValues is not None:
            return True
        # Fall back to whatever crosstalk information the detector carries.
        return detector is not None and detector.hasCrosstalk()
class ApdbConfig(pexConfig.Config):
    """Configuration for the Alert Production Database (APDB) connection,
    schema, and query behavior."""
    db_url = Field(dtype=str, doc="SQLAlchemy database connection URI")
    isolation_level = ChoiceField(
        dtype=str,
        doc="Transaction isolation level",
        allowed={"READ_COMMITTED": "Read committed",
                 "READ_UNCOMMITTED": "Read uncommitted",
                 "REPEATABLE_READ": "Repeatable read",
                 "SERIALIZABLE": "Serializable"},
        default="READ_COMMITTED",
        optional=True)
    connection_pool = Field(
        dtype=bool,
        doc=("If False then disable SQLAlchemy connection pool. "
             "Do not use connection pool when forking."),
        default=True)
    connection_timeout = Field(
        dtype=float,
        # Fixed typo: "sqlachemy" -> "SQLAlchemy".
        doc="Maximum time to wait time for database lock to be released before "
            "exiting. Defaults to SQLAlchemy defaults if not set.",
        default=None,
        optional=True)
    sql_echo = Field(dtype=bool,
                     doc="If True then pass SQLAlchemy echo option.",
                     default=False)
    dia_object_index = ChoiceField(
        dtype=str,
        doc="Indexing mode for DiaObject table",
        allowed={'baseline': "Index defined in baseline schema",
                 'pix_id_iov': "(pixelId, objectId, iovStart) PK",
                 'last_object_table': "Separate DiaObjectLast table"},
        default='baseline')
    dia_object_nightly = Field(dtype=bool,
                               doc="Use separate nightly table for DiaObject",
                               default=False)
    read_sources_months = Field(
        dtype=int,
        doc="Number of months of history to read from DiaSource",
        default=12)
    read_forced_sources_months = Field(
        dtype=int,
        doc="Number of months of history to read from DiaForcedSource",
        default=12)
    dia_object_columns = ListField(
        dtype=str,
        doc="List of columns to read from DiaObject, by default read all columns",
        default=[])
    object_last_replace = Field(
        dtype=bool,
        doc="If True (default) then use \"upsert\" for DiaObjectsLast table",
        default=True)
    schema_file = Field(
        dtype=str,
        doc="Location of (YAML) configuration file with standard schema",
        default=_data_file_name("apdb-schema.yaml"))
    extra_schema_file = Field(
        dtype=str,
        doc="Location of (YAML) configuration file with extra schema",
        default=_data_file_name("apdb-schema-extra.yaml"))
    column_map = Field(
        dtype=str,
        doc="Location of (YAML) configuration file with column mapping",
        default=_data_file_name("apdb-afw-map.yaml"))
    prefix = Field(dtype=str,
                   doc="Prefix to add to table names and index names",
                   default="")
    explain = Field(
        dtype=bool,
        doc="If True then run EXPLAIN SQL command on each executed query",
        default=False)
    timer = Field(dtype=bool,
                  doc="If True then print/log timing information",
                  default=False)
    diaobject_index_hint = Field(
        dtype=str,
        doc="Name of the index to use with Oracle index hint",
        default=None,
        optional=True)
    dynamic_sampling_hint = Field(
        dtype=int,
        doc="If non-zero then use dynamic_sampling hint",
        default=0)
    cardinality_hint = Field(dtype=int,
                             doc="If non-zero then use cardinality hint",
                             default=0)

    def validate(self):
        """Validate the configuration.

        Raises
        ------
        ValueError
            Raised if READ_COMMITTED isolation is requested together with an
            SQLite connection URL, a combination SQLite does not support.
        """
        super().validate()
        if self.isolation_level == "READ_COMMITTED" and self.db_url.startswith("sqlite"):
            # Fixed punctuation: period moved outside the quoted level name.
            raise ValueError(
                "Attempting to run Apdb with SQLITE and isolation level 'READ_COMMITTED'. "
                "Use 'READ_UNCOMMITTED' instead.")
class L1dbprotoConfig(Config):
    """Configuration for the L1DB prototype visit-simulation driver."""
    FOV_deg = Field(dtype=float, doc="FOV in degrees", default=3.5)
    transient_per_visit = Field(
        dtype=int,
        doc="average number of transients per visit",
        default=100)
    false_per_visit = Field(
        dtype=int,
        doc="average number of false positives per visit",
        default=5050)
    divide = Field(
        dtype=int,
        doc=("Divide FOV into NUM*NUM tiles for parallel processing. "
             "If negative means camera style tiling with 5x5 rafts "
             "each subdivided in both directions into negated value "
             "of this parameter."),
        default=1)
    interval = Field(dtype=int,
                     doc='Interval between visits in seconds, def: 45',
                     default=45)
    forced_cutoff_days = Field(
        dtype=int,
        doc=("Period after which we stop forced photometry "
             "if there was no observed source, def: 30"),
        default=30)
    start_time = Field(
        dtype=str,
        default="2020-01-01T03:00:00",
        doc=('Starting time, format: YYYY-MM-DDThh:mm:ss'
             '. Time is assumed to be in UTC time zone. Used only at'
             ' first invocation to initialize database.'))
    start_visit_id = Field(
        dtype=int,
        default=1,
        # Fixed typo: "intialize" -> "initialize".
        doc='Starting visit ID. Used only at first invocation'
            ' to initialize database.')
    sources_file = Field(
        dtype=str,
        doc='Name of input file with sources (numpy data)',
        default="var_sources.npy")
    mp_mode = ChoiceField(
        dtype=str,
        allowed=dict(fork="Forking mode", mpi="MPI mode"),
        # Fixed unbalanced backtick around `divide < 0`.
        doc='multiprocessing mode, only for `divide > 1` or `divide < 0`',
        default="fork")
    src_read_duty_cycle = Field(
        dtype=float,
        doc=("Fraction of visits for which (forced) sources are read from database."),
        default=1.)
    src_read_period = Field(
        dtype=int,
        # Fixed typo: "repating" -> "repeating".
        doc=("Period for repeating read/no-read cycles for (forced) sources."),
        default=1000)
    fill_empty_fields = Field(
        dtype=bool,
        doc="If True then store random values for fields not explicitly filled.",
        default=False)

    @property
    def FOV_rad(self) -> float:
        """FOV in radians.
        """
        return self.FOV_deg * math.pi / 180

    @property
    def start_time_dt(self) -> DateTime:
        """start_time as DateTime.
        """
        dt = DateTime(self.start_time, DateTime.TAI)
        return dt
class MatchedVisitMetricsConfig(Config):
    """Configuration for computing metrics on matched multi-visit catalogs."""
    instrumentName = Field(
        dtype=str,
        optional=False,
        doc="Instrument name to associate with verification specifications: e.g. HSC, CFHT, DECAM")
    datasetName = Field(
        dtype=str,
        optional=False,
        # Fixed typo: "measuremnts" -> "measurements".
        doc="Dataset name to associate metric measurements in SQuaSH")
    outputPrefix = Field(
        dtype=str,
        default="matchedVisit",
        doc="Root name for output files: the filter name is appended to this+'_'.")
    metricsRepository = Field(
        dtype=str,
        default='verify_metrics',
        doc="Repository to read metrics and specs from.")
    brightSnrMin = Field(
        dtype=float,
        default=None,
        optional=True,
        doc="Minimum PSF signal-to-noise ratio for a source to be considered bright.")
    brightSnrMax = Field(
        dtype=float,
        default=None,
        optional=True,
        doc="Maximum PSF signal-to-noise ratio for a source to be considered bright.")
    makeJson = Field(dtype=bool,
                     default=True,
                     doc="Whether to write JSON outputs.")
    makePlots = Field(dtype=bool,
                      default=True,
                      doc="Whether to write plot outputs.")
    matchRadius = Field(dtype=float,
                        default=1.0,
                        doc="Match radius (arcseconds).")
    doApplyExternalPhotoCalib = Field(
        dtype=bool,
        default=False,
        doc=("Whether to apply external photometric calibration via an "
             "`lsst.afw.image.PhotoCalib` object. Uses the "
             "`externalPhotoCalibName` field to determine which calibration "
             "to load."))
    externalPhotoCalibName = ChoiceField(
        dtype=str,
        doc="Type of external PhotoCalib if `doApplyExternalPhotoCalib` is True.",
        default="jointcal",
        allowed={
            "jointcal": "Use jointcal_photoCalib",
            "fgcm": "Use fgcm_photoCalib",
            "fgcm_tract": "Use fgcm_tract_photoCalib"
        })
    doApplyExternalSkyWcs = Field(
        dtype=bool,
        default=False,
        doc=("Whether to apply external astrometric calibration via an "
             "`lsst.afw.geom.SkyWcs` object. Uses the `externalSkyWcsName` "
             "field to determine which calibration to load."))
    externalSkyWcsName = ChoiceField(
        dtype=str,
        doc="Type of external SkyWcs if `doApplyExternalSkyWcs` is True.",
        default="jointcal",
        allowed={"jointcal": "Use jointcal_wcs"})
    skipTEx = Field(
        dtype=bool,
        default=False,
        doc="Skip TEx calculations (useful for older catalogs that don't have PsfShape measurements).")
    verbose = Field(dtype=bool,
                    default=False,
                    doc="More verbose output during validate calculations.")