class DeblendCoaddSourceSingleConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "band", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for single-band deblending of sources on a coadd patch.

    Inputs are the band-merged detection catalog and the per-band coadd;
    the output is a per-band catalog of deblended sources.
    """
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog")
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"))
    coadd = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"))
    measureCatalog = cT.Output(
        doc="The output measurement catalog of deblended sources",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")

    def setDefaults(self):
        # NOTE(review): `singleBandDeblend` is a config sub-field, not a
        # connection, and `setDefaults` overrides normally live on the Config
        # class rather than on a Connections class.  This looks like it
        # belongs on the corresponding *Config class -- confirm against the
        # full file before relying on it.
        super().setDefaults()
        self.singleBandDeblend.propagateAllPeaks = True
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multiband deblending of sources on a coadd patch.

    Consumes the band-merged detection catalog plus one coadd per band
    (``multiple=True``) and produces one template catalog per band.
    """
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog")
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"))
    coadds = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True)
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for scarlet multiband deblending on a coadd patch.

    The per-band flux/template catalogs are declared but pruned in
    ``__init__``; the persisted products are the band-independent deblended
    catalog and the scarlet model data.
    """
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog")
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"))
    coadds = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")
    fluxCatalogs = cT.Output(
        doc="Flux weighted catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True)
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedModel",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True)
    deblendedCatalog = cT.Output(
        doc="Catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModelData = cT.Output(
        doc="Multiband scarlet models produced by the deblender",
        name="{outputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Remove unused connections.
        # TODO: deprecate once RFC-860 passes.
        self.outputs -= set(("fluxCatalogs", "templateCatalogs"))
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multiband deblending on a coadd patch.

    NOTE(review): this variant uses the ``abstract_filter`` dimension, the
    pre-"band" name used by older butler data models -- presumably an older
    revision of the class; confirm which variant the file actually exports.
    """
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog"
    )
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog"
    )
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap")
    )
    coadds = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedModel_schema",
        storageClass="SourceCatalog"
    )
    fluxCatalogs = cT.Output(
        doc="Flux catalogs produced by multiband deblending, not written "
            "if conserve flux is turned off",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedModel",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Flux catalogs only exist when the deblender conserves flux.
        if not config.multibandDeblend.conserveFlux:
            self.outputs -= set(("fluxCatalogs",))
class TransformDiaSourceCatalogConnections(pipeBase.PipelineTaskConnections,
                                           dimensions=("instrument", "visit", "detector"),
                                           defaultTemplates={"coaddName": "deep",
                                                             "fakesType": ""}):
    """Connections for transforming a DiaSource catalog into a DataFrame.

    Consumes the per-detector DiaSource catalog and the difference image it
    was detected on, and produces the standardized DiaSource table.
    """
    diaSourceSchema = connTypes.InitInput(
        doc="Schema for DIASource catalog output by ImageDifference.",
        storageClass="SourceCatalog",
        name="{fakesType}{coaddName}Diff_diaSrc_schema",
    )
    diaSourceCat = connTypes.Input(
        doc="Catalog of DiaSources produced during image differencing.",
        name="{fakesType}{coaddName}Diff_diaSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    diaSourceTable = connTypes.Output(
        # BUG FIX: doc was a placeholder "."; describe the dataset so it is
        # meaningful in butler dataset-type listings.
        doc="Catalog of DiaSources transformed into a standardized DataFrame.",
        name="{fakesType}{coaddName}Diff_diaSrcTable",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )
class DiaPipelineConnections(pipeBase.PipelineTaskConnections,
                             dimensions=("instrument", "visit", "detector"),
                             defaultTemplates={"coaddName": "deep",
                                               "fakesType": ""}):
    """Butler connections for DiaPipelineTask.
    """
    diaSourceSchema = connTypes.InitInput(
        doc="Schema of the DiaSource catalog produced during image "
            "differencing",
        name="{fakesType}{coaddName}Diff_diaSrc_schema",
        storageClass="SourceCatalog",
        # NOTE(review): multiple=True on an InitInput schema is unusual for
        # this pattern -- confirm it is intentional.
        multiple=True)
    diaSourceCat = connTypes.Input(
        doc="Catalog of DiaSources produced during image differencing.",
        name="{fakesType}{coaddName}Diff_diaSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    exposure = connTypes.Input(
        doc="Calibrated exposure differenced with a template image during "
            "image differencing.",
        name="calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    warpedExposure = connTypes.Input(
        doc="Warped template used to create `subtractedExposure`. Not PSF "
            "matched.",
        dimensions=("instrument", "visit", "detector"),
        storageClass="ExposureF",
        name="{fakesType}{coaddName}Diff_warpedExp",
    )
    apdbMarker = connTypes.Output(
        doc="Marker dataset storing the configuration of the Apdb for each "
            "visit/detector. Used to signal the completion of the pipeline.",
        name="apdb_marker",
        storageClass="Config",
        dimensions=("instrument", "visit", "detector"),
    )
    associatedDiaSources = connTypes.Output(
        doc="Optional output storing the DiaSource catalog after matching, "
            "calibration, and standardization for insertation into the Apdb.",
        name="{fakesType}{coaddName}Diff_assocDiaSrc",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop the optional output when the task is configured not to
        # persist associated sources.
        if not config.doWriteAssociatedSources:
            self.outputs.remove("associatedDiaSources")
class MergeMeasurementsConnections(PipelineTaskConnections,
                                   dimensions=("skymap", "tract", "patch"),
                                   defaultTemplates={"inputCoaddName": "deep",
                                                     "outputCoaddName": "deep"}):
    """Connections for merging per-band measurement catalogs into a
    reference catalog for a patch.
    """
    inputSchema = cT.InitInput(
        # BUG FIX: doc was a copy-paste of the output-schema doc ("Schema for
        # the output merged measurement catalog."); this is the *input*
        # per-band measurement schema.
        doc="Schema of the input per-band measurement catalogs.",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        doc="Schema for the output merged measurement catalog.",
        name="{outputCoaddName}Coadd_ref_schema",
        storageClass="SourceCatalog",
    )
    catalogs = cT.Input(
        doc="Input catalogs to merge.",
        name="{inputCoaddName}Coadd_meas",
        multiple=True,
        storageClass="SourceCatalog",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    mergedCatalog = cT.Output(
        doc="Output merged catalog.",
        name="{outputCoaddName}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
class Dummy3Connections(PipelineTaskConnections, dimensions=("A", "B")):
    """Minimal connections used for testing: one connection of each kind,
    chained to the outputs of a "Dummy2" task.
    """
    initInput = cT.InitInput(name="Dummy2InitOutput", storageClass="ExposureF", doc="n/a")
    initOutput = cT.InitOutput(name="Dummy3InitOutput", storageClass="ExposureF", doc="n/a")
    input = cT.Input(name="Dummy2Output", storageClass="ExposureF", doc="n/a",
                     dimensions=("A", "B"))
    output = cT.Output(name="Dummy3Output", storageClass="ExposureF", doc="n/a",
                       dimensions=("A", "B"))
class VisitConnections(PipelineTaskConnections, dimensions={"instrument", "visit"}):
    """Per-visit test connections: two inputs, two outputs, and an optional
    init-input that is dropped when ``config.doUseInitIn`` is off.
    """
    initIn = connectionTypes.InitInput(
        name="VisitInitIn",
        storageClass="StructuredData",
        multiple=False,
    )
    a = connectionTypes.Input(
        name="VisitA",
        storageClass="StructuredData",
        multiple=False,
        dimensions={"instrument", "visit"},
    )
    b = connectionTypes.Input(
        name="VisitB",
        storageClass="StructuredData",
        multiple=False,
        dimensions={"instrument", "visit"},
    )
    initOut = connectionTypes.InitOutput(
        name="VisitInitOut",
        storageClass="StructuredData",
        multiple=True,
    )
    outA = connectionTypes.Output(
        name="VisitOutA",
        storageClass="StructuredData",
        multiple=False,
        dimensions={"instrument", "visit"},
    )
    outB = connectionTypes.Output(
        name="VisitOutB",
        storageClass="StructuredData",
        multiple=False,
        dimensions={"instrument", "visit"},
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # The init-input is optional; drop it when not configured.
        if not config.doUseInitIn:
            self.initInputs.remove("initIn")
class MergeDetectionsConnections(PipelineTaskConnections,
                                 dimensions=("tract", "patch", "skymap"),
                                 defaultTemplates={"inputCoaddName": 'deep',
                                                   "outputCoaddName": "deep"}):
    """Connections for merging per-band detection catalogs on a patch into a
    single band-merged detection catalog.
    """
    schema = cT.InitInput(
        doc="Schema of the input detection catalog",
        name="{inputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Schema of the merged detection catalog",
        name="{outputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog"
    )
    outputPeakSchema = cT.InitOutput(
        doc="Output schema of the Footprint peak catalog",
        name="{outputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog"
    )
    catalogs = cT.Input(
        doc="Detection Catalogs to be merged",
        name="{inputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True
    )
    skyMap = cT.Input(
        doc="SkyMap to be used in merging",
        # Use the canonical skymap dataset-type name from daf_butler.
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    outputCatalog = cT.Output(
        doc="Merged Detection catalog",
        name="{outputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
class ForcedPhotImageConnections(PipelineTaskConnections,
                                 dimensions=("abstract_filter", "skymap", "tract", "patch"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    """Connections for forced photometry on a coadd.

    NOTE(review): uses the ``abstract_filter`` dimension (the pre-"band"
    butler name) while other classes in this file use ``band`` -- presumably
    an older revision; verify against the butler data model in use.
    """
    inputSchema = cT.InitInput(
        doc="Schema for the input measurement catalogs.",
        name="{inputCoaddName}Coadd_ref_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        doc="Schema for the output forced measurement catalogs.",
        name="{outputCoaddName}Coadd_forced_src_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Input exposure to perform photometry on.",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
    refCat = cT.Input(
        doc="Catalog of shapes and positions at which to force photometry.",
        name="{inputCoaddName}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
    refWcs = cT.Input(
        doc="Reference world coordinate system.",
        # Component dataset (".wcs") of the coadd.
        name="{inputCoaddName}Coadd.wcs",
        storageClass="Wcs",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
    measCat = cT.Output(
        doc="Output forced photometry catalog.",
        name="{outputCoaddName}Coadd_forced_src",
        storageClass="SourceCatalog",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
class FgcmCalibrateTractTableConnections(pipeBase.PipelineTaskConnections,
                                         dimensions=("instrument",
                                                     "tract",)):
    """Connections for per-tract FGCM calibration from source tables.

    ``__init__`` validates that the configured reference-catalog loaders all
    agree with the ``refCat`` connection, then prunes optional connections
    according to the configuration.
    """
    camera = connectionTypes.PrerequisiteInput(
        doc="Camera instrument",
        name="camera",
        storageClass="Camera",
        dimensions=("instrument",),
        lookupFunction=lookupStaticCalibrations,
        isCalibration=True,
    )
    fgcmLookUpTable = connectionTypes.PrerequisiteInput(
        doc=("Atmosphere + instrument look-up-table for FGCM throughput and "
             "chromatic corrections."),
        name="fgcmLookUpTable",
        storageClass="Catalog",
        dimensions=("instrument",),
        deferLoad=True,
    )
    sourceSchema = connectionTypes.InitInput(
        doc="Schema for source catalogs",
        name="src_schema",
        storageClass="SourceCatalog",
    )
    refCat = connectionTypes.PrerequisiteInput(
        doc="Reference catalog to use for photometric calibration",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    source_catalogs = connectionTypes.Input(
        doc="Source table in parquet format, per visit",
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        deferLoad=True,
        multiple=True,
    )
    visitSummary = connectionTypes.Input(
        doc="Per-visit summary statistics table",
        name="visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        deferLoad=True,
        multiple=True,
    )
    background = connectionTypes.Input(
        doc="Calexp background model",
        name="calexpBackground",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
        deferLoad=True,
        multiple=True,
    )
    fgcmPhotoCalib = connectionTypes.Output(
        doc="Per-tract, per-visit photoCalib exposure catalogs produced from fgcm calibration",
        name="fgcmPhotoCalibTractCatalog",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "tract", "visit",),
        multiple=True,
    )
    fgcmTransmissionAtmosphere = connectionTypes.Output(
        doc="Per-visit atmosphere transmission files produced from fgcm calibration",
        name="transmission_atmosphere_fgcm_tract",
        storageClass="TransmissionCurve",
        dimensions=("instrument", "tract", "visit",),
        multiple=True,
    )
    fgcmRepeatability = connectionTypes.Output(
        doc="Per-band raw repeatability numbers in the fgcm tract calibration",
        name="fgcmRawRepeatability",
        storageClass="Catalog",
        dimensions=("instrument", "tract",),
        multiple=False,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # The ref_dataset_name will be deprecated with Gen2
        loaderName = config.fgcmBuildStars.fgcmLoadReferenceCatalog.refObjLoader.ref_dataset_name
        if config.connections.refCat != loaderName:
            raise ValueError("connections.refCat must be the same as "
                             "config.fgcmBuildStars.fgcmLoadReferenceCatalog.refObjLoader.ref_dataset_name")
        if config.fgcmOutputProducts.doReferenceCalibration:
            loaderName = config.fgcmOutputProducts.refObjLoader.ref_dataset_name
            if config.connections.refCat != loaderName:
                raise ValueError("connections.refCat must be the same as "
                                 "config.fgcmOutputProducts.refObjLoader.ref_dataset_name")

        if not config.fgcmBuildStars.doModelErrorsWithBackground:
            self.inputs.remove("background")

        # BUG FIX: the previous code removed "fgcmAtmosphereParameters" and
        # "fgcmZeropoints" from self.prerequisiteInputs, but no connections
        # with those names are defined on this class, so remove() would raise
        # KeyError whenever either output was disabled.  Remove the outputs
        # actually declared above instead.
        if not config.fgcmOutputProducts.doAtmosphereOutput:
            self.outputs.remove("fgcmTransmissionAtmosphere")
        if not config.fgcmOutputProducts.doZeropointOutput:
            self.outputs.remove("fgcmPhotoCalib")
class CalibrateConnections(pipeBase.PipelineTaskConnections,
                           dimensions=("instrument", "visit", "detector"),
                           defaultTemplates={}):
    """Connections for single-frame calibration.

    Optional reference catalogs and match outputs are pruned in ``__init__``
    according to the astrometry/photometry configuration flags.
    """
    icSourceSchema = cT.InitInput(
        doc="Schema produced by characterize image task, used to initialize this task",
        name="icSrc_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        doc="Schema after CalibrateTask has been initialized",
        name="src_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Input image to calibrate",
        name="icExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    background = cT.Input(
        doc="Backgrounds determined by characterize task",
        name="icExpBackground",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
    )
    icSourceCat = cT.Input(
        doc="Source catalog created by characterize task",
        name="icSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    astromRefCat = cT.PrerequisiteInput(
        doc="Reference catalog to use for astrometry",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    photoRefCat = cT.PrerequisiteInput(
        doc="Reference catalog to use for photometric calibration",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    outputExposure = cT.Output(
        doc="Exposure after running calibration task",
        name="calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    outputCat = cT.Output(
        doc="Source catalog produced in calibrate task",
        name="src",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    outputBackground = cT.Output(
        doc="Background models estimated in calibration task",
        name="calexpBackground",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
    )
    matches = cT.Output(
        doc="Source/refObj matches from the astrometry solver",
        name="srcMatch",
        storageClass="Catalog",
        dimensions=("instrument", "visit", "detector"),
    )
    matchesDenormalized = cT.Output(
        doc="Denormalized matches from astrometry solver",
        name="srcMatchFull",
        storageClass="Catalog",
        dimensions=("instrument", "visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Reference catalogs are only needed by the solvers that use them;
        # match outputs additionally require astrometry to have run.
        if config.doAstrometry is False:
            self.prerequisiteInputs.remove("astromRefCat")
        if config.doPhotoCal is False:
            self.prerequisiteInputs.remove("photoRefCat")

        if config.doWriteMatches is False or config.doAstrometry is False:
            self.outputs.remove("matches")
        if config.doWriteMatchesDenormalized is False or config.doAstrometry is False:
            self.outputs.remove("matchesDenormalized")
class MultibandFitConnections(
    pipeBase.PipelineTaskConnections,
    dimensions=("tract", "patch", "skymap"),
    defaultTemplates=multibandFitBaseTemplates,
):
    """Connections for multiband model fitting on a coadd patch."""
    cat_ref = cT.Input(
        doc="Reference multiband source catalog",
        name="{name_input_coadd}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cats_meas = cT.Input(
        doc="Deblended single-band source catalogs",
        name="{name_input_coadd}Coadd_meas",
        storageClass="SourceCatalog",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposures on which to run fits",
        name="{name_input_coadd}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    cat_output = cT.Output(
        doc="Measurement multi-band catalog",
        name="{name_output_coadd}Coadd_{name_output_cat}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cat_ref_schema = cT.InitInput(
        doc="Schema associated with a ref source catalog",
        storageClass="SourceCatalog",
        name="{name_input_coadd}Coadd_ref_schema",
    )
    cat_output_schema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{name_output_coadd}Coadd_{name_output_cat}_schema",
        storageClass="SourceCatalog")

    def adjustQuantum(self, inputs, outputs, label, data_id):
        """Validates the `lsst.daf.butler.DatasetRef` bands against the
        subtask's list of bands to fit and drops unnecessary bands.

        Parameters
        ----------
        inputs : `dict`
            Dictionary whose keys are an input (regular or prerequisite)
            connection name and whose values are a tuple of the connection
            instance and a collection of associated `DatasetRef` objects.
            The exact type of the nested collections is unspecified; it can be
            assumed to be multi-pass iterable and support `len` and ``in``,
            but it should not be mutated in place.  In contrast, the outer
            dictionaries are guaranteed to be temporary copies that are true
            `dict` instances, and hence may be modified and even returned;
            this is especially useful for delegating to `super` (see notes
            below).
        outputs : `Mapping`
            Mapping of output datasets, with the same structure as ``inputs``.
        label : `str`
            Label for this task in the pipeline (should be used in all
            diagnostic messages).
        data_id : `lsst.daf.butler.DataCoordinate`
            Data ID for this quantum in the pipeline (should be used in all
            diagnostic messages).

        Returns
        -------
        adjusted_inputs : `Mapping`
            Mapping of the same form as ``inputs`` with updated containers of
            input `DatasetRef` objects.  All inputs involving the 'band'
            dimension are adjusted to put them in consistent order and remove
            unneeded bands.
        adjusted_outputs : `Mapping`
            Mapping of updated output datasets; always empty for this task.

        Raises
        ------
        lsst.pipe.base.NoWorkFound
            Raised if there are not enough of the right bands to run the task
            on this quantum.
        """
        # Check which bands are going to be fit
        bands_fit, bands_read_only = self.config.get_band_sets()
        bands_needed = bands_fit.union(bands_read_only)

        adjusted_inputs = {}
        for connection_name, (connection, dataset_refs) in inputs.items():
            # Datasets without bands in their dimensions should be fine
            if 'band' in connection.dimensions:
                datasets_by_band = {dref.dataId['band']: dref for dref in dataset_refs}
                if not bands_needed.issubset(datasets_by_band.keys()):
                    raise pipeBase.NoWorkFound(
                        f'DatasetRefs={dataset_refs} have data with bands in the'
                        f' set={set(datasets_by_band.keys())},'
                        f' which is not a superset of the required bands={bands_needed} defined by'
                        f' {self.config.__class__}.fit_multiband='
                        f'{self.config.fit_multiband._value.__class__}\'s attributes'
                        f' bands_fit={bands_fit} and bands_read_only()={bands_read_only}.'
                        f' Add the required bands={bands_needed.difference(datasets_by_band.keys())}.'
                    )
                # Adjust all datasets with band dimensions to include just
                # the needed bands, in consistent order.
                adjusted_inputs[connection_name] = (connection, [
                    datasets_by_band[band] for band in bands_needed
                ])

        # Delegate to super for more checks.
        inputs.update(adjusted_inputs)
        super().adjustQuantum(inputs, outputs, label, data_id)
        return adjusted_inputs, {}
class FgcmBuildStarsTableConnections(pipeBase.PipelineTaskConnections,
                                     dimensions=("instrument",),
                                     defaultTemplates={}):
    """Connections for building the FGCM star lists from source tables.

    Optional connections are pruned in ``__init__`` based on the
    reference-match and background-model configuration flags.
    """
    camera = connectionTypes.PrerequisiteInput(
        doc="Camera instrument",
        name="camera",
        storageClass="Camera",
        dimensions=("instrument",),
        lookupFunction=lookupStaticCalibrations,
        isCalibration=True,
    )
    fgcmLookUpTable = connectionTypes.PrerequisiteInput(
        doc=("Atmosphere + instrument look-up-table for FGCM throughput and "
             "chromatic corrections."),
        name="fgcmLookUpTable",
        storageClass="Catalog",
        dimensions=("instrument",),
        deferLoad=True,
    )
    sourceSchema = connectionTypes.InitInput(
        doc="Schema for source catalogs",
        name="src_schema",
        storageClass="SourceCatalog",
    )
    refCat = connectionTypes.PrerequisiteInput(
        doc="Reference catalog to use for photometric calibration",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    sourceTable_visit = connectionTypes.Input(
        doc="Source table in parquet format, per visit",
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        deferLoad=True,
        multiple=True,
    )
    visitSummary = connectionTypes.Input(
        doc=("Per-visit consolidated exposure metadata. These catalogs use "
             "detector id for the id and must be sorted for fast lookups of a "
             "detector."),
        name="visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        deferLoad=True,
        multiple=True,
    )
    background = connectionTypes.Input(
        doc="Calexp background model",
        name="calexpBackground",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
        deferLoad=True,
        multiple=True,
    )
    fgcmVisitCatalog = connectionTypes.Output(
        doc="Catalog of visit information for fgcm",
        name="fgcmVisitCatalog",
        storageClass="Catalog",
        dimensions=("instrument",),
    )
    fgcmStarObservations = connectionTypes.Output(
        doc="Catalog of star observations for fgcm",
        name="fgcmStarObservations",
        storageClass="Catalog",
        dimensions=("instrument",),
    )
    fgcmStarIds = connectionTypes.Output(
        doc="Catalog of fgcm calibration star IDs",
        name="fgcmStarIds",
        storageClass="Catalog",
        dimensions=("instrument",),
    )
    fgcmStarIndices = connectionTypes.Output(
        doc="Catalog of fgcm calibration star indices",
        name="fgcmStarIndices",
        storageClass="Catalog",
        dimensions=("instrument",),
    )
    fgcmReferenceStars = connectionTypes.Output(
        doc="Catalog of fgcm-matched reference stars",
        name="fgcmReferenceStars",
        storageClass="Catalog",
        dimensions=("instrument",),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # CLEANUP: the original checked `config.doReferenceMatches` twice;
        # all reference-match connections are now pruned under one guard
        # (removal order among distinct connections is immaterial).
        if not config.doReferenceMatches:
            self.prerequisiteInputs.remove("refCat")
            self.prerequisiteInputs.remove("fgcmLookUpTable")
            self.outputs.remove("fgcmReferenceStars")

        if not config.doModelErrorsWithBackground:
            self.inputs.remove("background")
class MultibandFitConnections(
    pipeBase.PipelineTaskConnections,
    dimensions=("tract", "patch", "skymap"),
    defaultTemplates=multibandFitBaseTemplates,
):
    """Connections for multiband model fitting on a coadd patch (legacy
    single-argument ``adjustQuantum`` API)."""
    cat_ref = cT.Input(
        doc="Reference multiband source catalog",
        name="{name_input_coadd}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cats_meas = cT.Input(
        doc="Deblended single-band source catalogs",
        name="{name_input_coadd}Coadd_meas",
        storageClass="SourceCatalog",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposures on which to run fits",
        name="{name_input_coadd}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    cat_output = cT.Output(
        doc="Measurement multi-band catalog",
        name="{name_output_coadd}Coadd_{name_output_cat}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cat_ref_schema = cT.InitInput(
        doc="Schema associated with a ref source catalog",
        storageClass="SourceCatalog",
        name="{name_input_coadd}Coadd_ref_schema",
    )
    cat_output_schema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{name_output_coadd}Coadd_{name_output_cat}_schema",
        storageClass="SourceCatalog")

    def adjustQuantum(self, datasetRefMap):
        """Validates the `lsst.daf.butler.DatasetRef` bands against the
        subtask's list of bands to fit and drops unnecessary bands.

        Parameters
        ----------
        datasetRefMap : `NamedKeyDict`
            Mapping from dataset type to a `set` of
            `lsst.daf.butler.DatasetRef` objects

        Returns
        -------
        datasetRefMap : `NamedKeyDict`
            Modified mapping of input with possibly adjusted
            `lsst.daf.butler.DatasetRef` objects.

        Raises
        ------
        ValueError
            Raised if any of the per-band datasets have an inconsistent band
            set, or if the band set to fit is not a subset of the data bands.
        """
        datasetRefMap = super().adjustQuantum(datasetRefMap)
        # Check which bands are going to be fit
        bands_fit, bands_read_only = self.config.get_band_sets()
        bands_needed = bands_fit.union(bands_read_only)

        bands_data = None
        bands_extra = set()

        for type_d, ref_d in datasetRefMap.items():
            # Datasets without bands in their dimensions should be fine
            if 'band' in type_d.dimensions:
                bands_set = {dref.dataId['band'] for dref in ref_d}
                if bands_data is None:
                    bands_data = bands_set
                    if bands_needed != bands_data:
                        if not bands_needed.issubset(bands_data):
                            raise ValueError(
                                f'Datarefs={ref_d} have data with bands in the set={bands_set},'
                                f'which is not a subset of the required bands={bands_needed} defined by '
                                f'{self.config.__class__}.fit_multiband='
                                f'{self.config.fit_multiband._value.__class__}\'s attributes'
                                f' bands_fit={bands_fit} and bands_read_only()={bands_read_only}.'
                                f' Add the required bands={bands_needed.difference(bands_data)}.'
                            )
                        else:
                            bands_extra = bands_data.difference(bands_needed)
                elif bands_set != bands_data:
                    raise ValueError(
                        f'Datarefs={ref_d} have data with bands in the set={bands_set}'
                        f' which differs from the previous={bands_data}); bandsets must be identical.'
                    )
                if bands_extra:
                    # BUG FIX: the original removed elements from ref_d while
                    # iterating over it directly, which mutates a collection
                    # during iteration (RuntimeError for sets, skipped
                    # elements for lists).  Iterate over a snapshot instead.
                    for dref in tuple(ref_d):
                        if dref.dataId['band'] in bands_extra:
                            ref_d.remove(dref)
        return datasetRefMap
class SimpleConnections(PipelineTaskConnections, dimensions=(),
                        defaultTemplates={"template": ""}):
    """Minimal connections class: a single init-input schema whose dataset
    name is parameterized by the ``template`` default template.
    """
    schema = cT.InitInput(doc="Schema", name="{template}schema",
                          storageClass="SourceCatalog")