# Example 1
class ParseConfig(Config):
    """Configuration for ParseTask."""

    # Property name -> header keyword it is read from.
    translation = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="Translation table for property --> header",
    )
    # Property name -> translator method that computes it.
    translators = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="Properties and name of translator method",
    )
    # Fallback values used when the header does not supply one.
    defaults = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="Default values if header is not present",
    )
    hdu = Field(dtype=int, default=0, doc="HDU to read for metadata")
    extnames = ListField(dtype=str, default=[], doc="Extension names to search for")
# Example 2
class RegisterConfig(Config):
    """Configuration for the RegisterTask."""

    table = Field(dtype=str, default="raw", doc="Name of table")
    columns = DictField(
        keytype=str,
        itemtype=str,
        default={
            'object': 'text',
            'visit': 'int',
            'ccd': 'int',
            'filter': 'text',
            'date': 'text',
            'taiObs': 'text',
            'expTime': 'double',
        },
        # Column types are restricted to these three SQL type names.
        itemCheck=lambda col: col in ("text", "int", "double"),
        doc="List of columns for raw table, with their types",
    )
    unique = ListField(
        dtype=str,
        default=["visit", "ccd"],
        doc="List of columns to be declared unique for the table",
    )
    visit = ListField(
        dtype=str,
        default=["visit", "object", "date", "filter"],
        doc="List of columns for raw_visit table",
    )
    ignore = Field(dtype=bool, default=False, doc="Ignore duplicates in the table?")
    permissions = Field(
        dtype=int,
        default=0o664,  # rw-rw-r--
        doc="Permissions mode for registry; 0o664 = rw-rw-r--",
    )
# Example 3
class CalibrateCatalogConfig(Config):
    """Configuration for calibrating a source catalog."""

    doApplyCalib = Field(dtype=bool, default=True, doc="Calibrate fluxes to magnitudes?")
    # Optional: left as None when no schema renaming is required.
    srcSchemaMap = DictField(
        keytype=str,
        itemtype=str,
        optional=True,
        default=None,
        doc="Mapping between different stack (e.g. HSC vs. LSST) schema names",
    )
class PropagateVisitFlagsConfig(Config):
    """!Configuration for propagating flags to coadd"""

    # Each key is a flag name; the value is the occurrence-fraction threshold
    # (see the field doc for the exact semantics).
    flags = DictField(
        keytype=str,
        itemtype=float,
        default={
            "calib_psf_candidate": 0.2,
            "calib_psf_used": 0.2,
            "calib_psf_reserved": 0.2,
            "calib_astrometry_used": 0.2,
            "calib_photometry_used": 0.2,
            "calib_photometry_reserved": 0.2,
        },
        doc=("Source catalog flags to propagate, with the threshold of relative occurrence "
             "(valid range: [0-1], default is 0.2).  Coadd object will have flag set if the "
             "fraction of input visits in which it is flagged is greater than the threshold."),
    )
    matchRadius = Field(dtype=float, default=0.2, doc="Source matching radius (arcsec)")
    ccdName = Field(dtype=str, default='ccd', doc="Name of ccd to give to butler")
# Example 5
class CombineDiaForcedConfig(Config):
    """Configuration for combining diaSrc catalogs."""

    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    ccdKey = Field(dtype=str, default='detector', doc="Name of ccd to give to butler")
    # Output column name -> diaSrc column to carry through (averaged over matches).
    keepFields = DictField(
        keytype=str,
        itemtype=str,
        default={
            'psf_flux': 'base_PsfFlux_instFlux',
            'psf_flux_err': 'base_PsfFlux_instFluxErr',
        },
        doc='Keep these fields from the diaSrc catalogs.  Will be averaged over matches',
    )
    storage = Field(dtype=str, default="pickle", doc="pandas storage format")
# Example 6
class PropagateVisitFlagsConfig(Config):
    """!Configuration for propagating flags to coadd"""

    # Flag name -> relative-occurrence threshold.
    flags = DictField(
        keytype=str,
        itemtype=float,
        default={"calib_psfCandidate": 0.2, "calib_psfUsed": 0.2},
        doc="Source catalog flags to propagate, with the threshold of relative occurrence.",
    )
    matchRadius = Field(dtype=float, default=0.2, doc="Source matching radius (arcsec)")
# Example 7
class ProcessFocusConfig(Config):
    """Configuration for focus processing (ProcessFocus).

    Defaults set in ``setDefaults`` are tuned for HSC (see that method's
    docstring).
    """

    focus = ConfigField(dtype=FocusConfig, doc="Focus determination")
    zemax = DictField(keytype=str, itemtype=str, default={},
                      doc="Mapping from filter name to zemax configuration filename")
    isr = ConfigurableField(target=SubaruIsrTask, doc="Instrument Signature Removal")
    installPsf = ConfigurableField(target=InstallGaussianPsfTask, doc="Install a simple PSF model")
    background = ConfigField(dtype=measAlg.estimateBackground.ConfigClass, doc="Background removal")
    detectAndMeasure = ConfigurableField(target=DetectAndMeasureTask, doc="Source detection and measurement")
    starSelector = ConfigurableField(target=measAlg.ObjectSizeStarSelectorTask,
                                     doc="Star selection algorithm")
    doWrite = Field(dtype=bool, default=True, doc="Write processed image?")

    def setDefaults(self):
        """These defaults are suitable for HSC, but may be useful
        for other cameras if the focus code is employed elsewhere.
        """
        Config.setDefaults(self)
        # Zemax config files live under $OBS_SUBARU_DIR/hsc; %d is filled with
        # the per-filter configuration number below.
        zemaxBase = os.path.join(os.environ["OBS_SUBARU_DIR"], "hsc", "zemax_config%d_0.0.dat")
        # Dict comprehension instead of dict([(k, v) for ...]) — same mapping,
        # no intermediate list (flake8-comprehensions C402).
        self.zemax = {f: zemaxBase % n for f, n in [
            ('g', 9), ('r', 1), ('i', 3), ('z', 5), ('y', 7),
            ('N921', 5), ('N816', 3), ('N1010', 7), ('N387', 9), ('N515', 9),
        ]}
        self.load(os.path.join(os.environ["OBS_SUBARU_DIR"], "config", "hsc", "isr.py"))
        self.installPsf.fwhm = 9  # pixels
        self.installPsf.width = 31  # pixels
        self.detectAndMeasure.detection.includeThresholdMultiplier = 3.0
        self.detectAndMeasure.measurement.algorithms.names.add("base_GaussianCentroid")
        self.detectAndMeasure.measurement.slots.centroid = "base_GaussianCentroid"
        # Set up simple shape, if available (because focus calibrations are for that).
        # If it's not available, we'll crash later; but we don't want to crash here
        # (brings everything down)!
        if haveSimpleShape:
            self.detectAndMeasure.measurement.algorithms.names.add("ext_simpleShape_SimpleShape")
            self.detectAndMeasure.measurement.algorithms["ext_simpleShape_SimpleShape"].sigma = 5.0  # pixels

        # Set up background estimate.
        self.background.ignoredPixelMask = ['EDGE', 'NO_DATA', 'DETECTED', 'DETECTED_NEGATIVE', 'BAD']
        self.detectAndMeasure.detection.background.algorithm = 'LINEAR'
        self.detectAndMeasure.doDeblend = False
        self.starSelector.badFlags = ["base_PixelFlags_flag_edge",
                                      "base_PixelFlags_flag_interpolatedCenter",
                                      "base_PixelFlags_flag_saturatedCenter",
                                      "base_PixelFlags_flag_bad",
                                      ]
        self.starSelector.sourceFluxField = "base_GaussianFlux_flux"
        self.starSelector.widthMax = 20.0
        self.starSelector.widthStdAllowed = 5.0
# Example 8
class ConvertRepoConfig(Config):
    """Configuration for converting a Gen2 data repository to Gen3."""

    raws = ConfigurableField(
        target=RawIngestTask,
        doc="Configuration for subtask responsible for ingesting raws and adding "
            "visit and exposure dimension entries.",
    )
    skyMaps = ConfigDictField(
        keytype=str,
        itemtype=ConvertRepoSkyMapConfig,
        default={},
        doc="Mapping from Gen3 skymap name to the parameters used to construct a "
            "BaseSkyMap instance.  This will be used to associate names with "
            "existing skymaps found in the Gen2 repo.",
    )
    rootSkyMapName = Field(
        dtype=str,
        optional=True,
        default=None,
        doc="Name of a Gen3 skymap (an entry in ``self.skyMaps``) to assume for "
            "datasets in the root repository when no SkyMap is found there. ",
    )
    collections = DictField(
        keytype=str,
        itemtype=str,
        default={
            "deepCoadd_skyMap": "skymaps",
            "brightObjectMask": "masks",
        },
        doc="Special collections (values) for certain dataset types (keys).  "
            "These are used in addition to rerun collections for datasets in "
            "reruns.  The 'raw' dataset must have an entry here if it is to be "
            "converted.",
    )
    storageClasses = DictField(
        keytype=str,
        itemtype=str,
        default={
            "BaseSkyMap": "SkyMap",
            "BaseCatalog": "Catalog",
            "BackgroundList": "Background",
            "raw": "Exposure",
            "MultilevelParquetTable": "DataFrame",
            "ParquetTable": "DataFrame",
            "SkyWcs": "Wcs",
        },
        doc="Mapping from dataset type name or Gen2 policy entry (e.g. 'python' "
            "or 'persistable') to the Gen3 StorageClass name.",
    )
    doRegisterInstrument = Field(
        dtype=bool,
        default=True,
        doc="If True (default), add dimension records for the Instrument and its "
            "filters and detectors to the registry instead of assuming they are "
            "already present.",
    )
    doWriteCuratedCalibrations = Field(
        dtype=bool,
        default=True,
        doc="If True (default), ingest human-curated calibrations directly via "
            "the Instrument interface.  Note that these calibrations are never "
            "converted from Gen2 repositories.",
    )
    refCats = ListField(
        dtype=str,
        default=[],
        doc="The names of reference catalogs (subdirectories under ref_cats) to "
            "be converted",
    )
    fileIgnorePatterns = ListField(
        dtype=str,
        default=[
            "README.txt", "*~?", "butler.yaml", "gen3.sqlite3",
            "registry.sqlite3", "calibRegistry.sqlite3", "_mapper", "_parent",
            "repositoryCfg.yaml"
        ],
        doc="Filename globs that should be ignored instead of being treated as "
            "datasets.",
    )
    datasetIncludePatterns = ListField(
        dtype=str,
        default=["*"],
        doc="Glob-style patterns for dataset type names that should be converted.",
    )
    datasetIgnorePatterns = ListField(
        dtype=str,
        default=[],
        doc="Glob-style patterns for dataset type names that should not be "
            "converted despite matching a pattern in datasetIncludePatterns.",
    )
    ccdKey = Field(
        dtype=str,
        default="ccd",
        doc="Key used for the Gen2 equivalent of 'detector' in data IDs.",
    )
    relatedOnly = Field(
        dtype=bool,
        default=False,
        doc="If True (default), only convert datasets that are related to the "
            "ingested visits.  Ignored unless a list of visits is passed to "
            "run().",
    )

    # ``transfer`` and ``instrument`` simply delegate to the raw-ingest subtask
    # configuration so there is a single source of truth for those settings.

    @property
    def transfer(self):
        return self.raws.transfer

    @transfer.setter
    def transfer(self, value):
        self.raws.transfer = value

    @property
    def instrument(self):
        return self.raws.instrument

    @instrument.setter
    def instrument(self, value):
        self.raws.instrument = value

    def setDefaults(self):
        # No transfer mode by default; callers must opt in explicitly.
        self.transfer = None
# Example 9
class ConvertRepoConfig(Config):
    """Configuration for converting a Gen2 data repository to Gen3."""

    raws = ConfigurableField(
        target=RawIngestTask,
        doc="Configuration for subtask responsible for ingesting raws and adding "
            "exposure dimension entries.",
    )
    defineVisits = ConfigurableField(
        target=DefineVisitsTask,
        doc="Configuration for the subtask responsible for defining visits from "
            "exposures.",
    )
    skyMaps = ConfigDictField(
        keytype=str,
        itemtype=ConvertRepoSkyMapConfig,
        default={},
        doc="Mapping from Gen3 skymap name to the parameters used to construct a "
            "BaseSkyMap instance.  This will be used to associate names with "
            "existing skymaps found in the Gen2 repo.",
    )
    rootSkyMapName = Field(
        dtype=str,
        optional=True,
        default=None,
        doc="Name of a Gen3 skymap (an entry in ``self.skyMaps``) to assume for "
            "datasets in the root repository when no SkyMap is found there. ",
    )
    runs = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="A mapping from dataset type name to the RUN collection they should "
            "be inserted into.  This must include all datasets that can be found "
            "in the root repository; other repositories will use per-repository "
            "runs.",
    )
    runsForced = DictField(
        keytype=str,
        itemtype=str,
        default={
            "brightObjectMask": "masks",
        },
        doc="Like ``runs``, but is used even when the dataset is present in a "
            "non-root repository (i.e. rerun), overriding the non-root "
            "repository's main collection.",
    )
    storageClasses = DictField(
        keytype=str,
        itemtype=str,
        default={
            "bias": "ExposureF",
            "dark": "ExposureF",
            "flat": "ExposureF",
            "defects": "Defects",
            "crosstalk": "CrosstalkCalib",
            "BaseSkyMap": "SkyMap",
            "BaseCatalog": "Catalog",
            "BackgroundList": "Background",
            "raw": "Exposure",
            "MultilevelParquetTable": "DataFrame",
            "ParquetTable": "DataFrame",
            "SkyWcs": "Wcs",
        },
        doc="Mapping from dataset type name or Gen2 policy entry (e.g. 'python' "
            "or 'persistable') to the Gen3 StorageClass name.",
    )
    formatterClasses = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="Mapping from dataset type name to formatter class. "
            "By default these are derived from the formatters listed in the"
            " Gen3 datastore configuration.",
    )
    targetHandlerClasses = DictField(
        keytype=str,
        itemtype=str,
        default={},
        doc="Mapping from dataset type name to target handler class.",
    )
    doRegisterInstrument = Field(
        dtype=bool,
        default=True,
        doc="If True (default), add dimension records for the Instrument and its "
            "filters and detectors to the registry instead of assuming they are "
            "already present.",
    )
    refCats = ListField(
        dtype=str,
        default=[],
        doc="The names of reference catalogs (subdirectories under ref_cats) to "
            "be converted",
    )
    fileIgnorePatterns = ListField(
        dtype=str,
        default=["README.txt", "*~?", "butler.yaml", "gen3.sqlite3",
                 "registry.sqlite3", "calibRegistry.sqlite3", "_mapper",
                 "_parent", "repositoryCfg.yaml"],
        doc="Filename globs that should be ignored instead of being treated as "
            "datasets.",
    )
    rawDatasetType = Field(
        dtype=str,
        default="raw",
        doc="Gen2 dataset type to use for raw data.",
    )
    datasetIncludePatterns = ListField(
        dtype=str,
        default=["*"],
        doc="Glob-style patterns for dataset type names that should be converted.",
    )
    datasetIgnorePatterns = ListField(
        dtype=str,
        default=[],
        doc="Glob-style patterns for dataset type names that should not be "
            "converted despite matching a pattern in datasetIncludePatterns.",
    )
    ccdKey = Field(
        dtype=str,
        default="ccd",
        doc="Key used for the Gen2 equivalent of 'detector' in data IDs.",
    )
    relatedOnly = Field(
        dtype=bool,
        default=False,
        doc="If True (default), only convert datasets that are related to the "
            "ingested visits.  Ignored unless a list of visits is passed to "
            "run().",
    )
    doMakeUmbrellaCollection = Field(
        dtype=bool,
        default=True,
        doc="If True (default), define an '<instrument>/defaults' CHAINED "
            "collection that includes everything found in the root repo as well "
            "as the default calibration collection.",
    )
    extraUmbrellaChildren = ListField(
        dtype=str,
        default=[],
        doc="Additional child collections to include in the umbrella collection. "
            "Ignored if doMakeUmbrellaCollection=False.",
    )

    # ``transfer`` delegates to the raw-ingest subtask configuration so there
    # is a single source of truth for that setting.

    @property
    def transfer(self):
        return self.raws.transfer

    @transfer.setter
    def transfer(self, value):
        self.raws.transfer = value

    def setDefaults(self):
        # No transfer mode by default; callers must opt in explicitly.
        self.transfer = None