class DiaPipelineConnections(
        pipeBase.PipelineTaskConnections,
        dimensions=("instrument", "visit", "detector"),
        defaultTemplates={"coaddName": "deep", "fakesType": ""}):
    """Butler connections for DiaPipelineTask.
    """
    # NOTE(review): multiple=True on a schema InitInput is unusual -- the
    # other schema InitInputs in this file are singular; confirm intended.
    diaSourceSchema = connTypes.InitInput(
        doc="Schema of the DiaSource catalog produced during image "
            "differencing",
        name="{fakesType}{coaddName}Diff_diaSrc_schema",
        storageClass="SourceCatalog",
        multiple=True)
    diaSourceCat = connTypes.Input(
        doc="Catalog of DiaSources produced during image differencing.",
        name="{fakesType}{coaddName}Diff_diaSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    exposure = connTypes.Input(
        doc="Calibrated exposure differenced with a template image during "
            "image differencing.",
        name="calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    warpedExposure = connTypes.Input(
        doc="Warped template used to create `subtractedExposure`. Not PSF "
            "matched.",
        dimensions=("instrument", "visit", "detector"),
        storageClass="ExposureF",
        name="{fakesType}{coaddName}Diff_warpedExp",
    )
    apdbMarker = connTypes.Output(
        doc="Marker dataset storing the configuration of the Apdb for each "
            "visit/detector. Used to signal the completion of the pipeline.",
        name="apdb_marker",
        storageClass="Config",
        dimensions=("instrument", "visit", "detector"),
    )
    associatedDiaSources = connTypes.Output(
        # Fixed typo: "insertation" -> "insertion".
        doc="Optional output storing the DiaSource catalog after matching, "
            "calibration, and standardization for insertion into the Apdb.",
        name="{fakesType}{coaddName}Diff_assocDiaSrc",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # The associated-source catalog is an optional product.
        if not config.doWriteAssociatedSources:
            self.outputs.remove("associatedDiaSources")
class InsertFakesConnections(PipelineTaskConnections,
                             defaultTemplates={"coaddName": "deep",
                                               "fakesType": "fakes_"},
                             dimensions=("tract", "patch", "band", "skymap")):
    """Connections for the task that draws fake sources into a coadd."""

    image = cT.Input(
        name="{coaddName}Coadd",
        doc="Image into which fakes are to be added.",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    fakeCat = cT.Input(
        name="{fakesType}fakeSourceCat",
        doc="Catalog of fake sources to draw inputs from.",
        storageClass="DataFrame",
        dimensions=("tract", "skymap"),
    )
    imageWithFakes = cT.Output(
        name="{fakesType}{coaddName}Coadd",
        doc="Image with fake sources added.",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
class SingleStarCentroidTaskConnections(pipeBase.PipelineTaskConnections,
                                        dimensions=("instrument", "visit", "detector")):
    """Connections for finding the main star centroid of an exposure."""

    inputExp = cT.Input(
        doc="Image-characterize output exposure",
        name="icExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
        multiple=False,
    )
    inputSources = cT.Input(
        doc="Image-characterize output sources.",
        name="icSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
        multiple=False,
    )
    # Reference catalog is sharded over sky pixels, hence multiple + deferred.
    astromRefCat = cT.PrerequisiteInput(
        doc="Reference catalog to use for astrometry",
        name="gaia_dr2_20200414",
        storageClass="SimpleCatalog",
        dimensions=("skypix", ),
        deferLoad=True,
        multiple=True,
    )
    atmospecCentroid = cT.Output(
        doc="The main star centroid in yaml format.",
        name="atmospecCentroid",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "visit", "detector"),
    )
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates
                                          ):
    """Connections for multiband coadd deblending."""

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_mergeDet_schema",
        doc="Input schema to use in the deblend catalog",
        storageClass="SourceCatalog",
    )
    peakSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_peak_schema",
        doc="Schema of the footprint peak catalogs",
        storageClass="PeakCatalog",
    )
    mergedDetections = cT.Input(
        name="{inputCoaddName}Coadd_mergeDet",
        doc="Detection catalog merged across bands",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    # One coadd per band, hence multiple=True and the extra "band" dimension.
    coadds = cT.Input(
        name="{inputCoaddName}Coadd_calexp",
        doc="Exposure on which to run deblending",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        doc="Output of the schema used in deblending task",
        storageClass="SourceCatalog",
    )
    templateCatalogs = cT.Output(
        name="{outputCoaddName}Coadd_deblendedFlux",
        doc="Template catalogs produced by multiband deblending",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True,
    )
class ExamplePipelineTaskConnections(PipelineTaskConnections,
                                     dimensions=["Visit", "Detector"]):
    """Example connections with an optional second input and output."""

    input1 = cT.Input(doc="Input for this task", name="",
                      storageClass="example",
                      dimensions=["Visit", "Detector"])
    input2 = cT.Input(doc="Input for this task", name="",
                      storageClass="example",
                      dimensions=["Visit", "Detector"])
    output1 = cT.Output(doc="Output for this task", name="",
                        storageClass="example",
                        dimensions=["Visit", "Detector"])
    output2 = cT.Output(doc="Output for this task", name="",
                        storageClass="example",
                        dimensions=["Visit", "Detector"])

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Connections whose configured dataset type name is empty are
        # treated as disabled and removed.
        if not config.connections.input2:
            self.inputs.remove('input2')
        if not config.connections.output2:
            self.outputs.remove('output2')
class CalcZernikesTaskConnections(
    pipeBase.PipelineTaskConnections,
    dimensions=("visit", "detector", "instrument")
):
    """Connections for estimating Zernike coefficients from donut stamps."""

    donutStampsExtra = connectionTypes.Input(
        name="donutStampsExtra",
        doc="Extra-focal Donut Postage Stamp Images",
        storageClass="StampsBase",
        dimensions=("visit", "detector", "instrument"),
    )
    donutStampsIntra = connectionTypes.Input(
        name="donutStampsIntra",
        doc="Intra-focal Donut Postage Stamp Images",
        storageClass="StampsBase",
        dimensions=("visit", "detector", "instrument"),
    )
    outputZernikesRaw = connectionTypes.Output(
        name="zernikeEstimateRaw",
        doc="Zernike Coefficients from all donuts",
        storageClass="NumpyArray",
        dimensions=("visit", "detector", "instrument"),
    )
    outputZernikesAvg = connectionTypes.Output(
        name="zernikeEstimateAvg",
        doc="Zernike Coefficients averaged over donuts",
        storageClass="NumpyArray",
        dimensions=("visit", "detector", "instrument"),
    )
class VisitConnections(PipelineTaskConnections,
                       dimensions={"instrument", "visit"}):
    """Per-visit test connections: two inputs, two outputs."""

    a = connectionTypes.Input(
        name="VisitA",
        dimensions={"instrument", "visit"},
        storageClass="StructuredData",
        multiple=False,
    )
    b = connectionTypes.Input(
        name="VisitB",
        dimensions={"instrument", "visit"},
        storageClass="StructuredData",
        multiple=False,
    )
    outA = connectionTypes.Output(
        name="VisitOutA",
        dimensions={"instrument", "visit"},
        storageClass="StructuredData",
        multiple=False,
    )
    outB = connectionTypes.Output(
        name="VisitOutB",
        dimensions={"instrument", "visit"},
        storageClass="StructuredData",
        multiple=False,
    )
class ProcessBrightStarsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("instrument", "visit", "detector")):
    """Connections for extracting bright-star postage stamps."""

    # NOTE(review): omits "instrument" from its dimensions while skyCorr
    # includes it -- presumably both resolve to the same data IDs; confirm
    # the asymmetry is intentional.
    inputExposure = cT.Input(
        name="calexp",
        doc="Input exposure from which to extract bright star stamps",
        storageClass="ExposureF",
        dimensions=("visit", "detector"),
    )
    skyCorr = cT.Input(
        name="skyCorr",
        doc="Input Sky Correction to be subtracted from the calexp if doApplySkyCorr=True",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
    )
    refCat = cT.PrerequisiteInput(
        name="gaia_dr2_20200414",
        doc="Reference catalog that contains bright star positions",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        multiple=True,
        deferLoad=True,
    )
    brightStarStamps = cT.Output(
        name="brightStarStamps",
        doc="Set of preprocessed postage stamps, each centered on a single bright star.",
        storageClass="BrightStarStamps",
        dimensions=("visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Sky correction is only consumed when enabled in the config.
        if not config.doApplySkyCorr:
            self.inputs.remove("skyCorr")
class TransformDiaSourceCatalogConnections(pipeBase.PipelineTaskConnections,
                                           dimensions=("instrument", "visit", "detector"),
                                           defaultTemplates={"coaddName": "deep",
                                                             "fakesType": ""}):
    """Connections for transforming a DiaSource catalog into a DataFrame."""

    diaSourceSchema = connTypes.InitInput(
        doc="Schema for DIASource catalog output by ImageDifference.",
        storageClass="SourceCatalog",
        name="{fakesType}{coaddName}Diff_diaSrc_schema",
    )
    diaSourceCat = connTypes.Input(
        doc="Catalog of DiaSources produced during image differencing.",
        name="{fakesType}{coaddName}Diff_diaSrc",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    diaSourceTable = connTypes.Output(
        # Replaced placeholder doc string (was ".").
        doc="Catalog of DiaSources transformed into DataFrame form.",
        name="{fakesType}{coaddName}Diff_diaSrcTable",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )
class SubtractTaskConnections(
        pipeBase.PipelineTaskConnections,
        dimensions={"instrument", "exposure", "detector"},
        defaultTemplates={}):
    """Connections for subtracting one image from another."""

    inputImage = cT.Input(
        doc="Input image.",
        name="intype",  # default dataset type for input image
        storageClass="SPHERExImage",
        dimensions=["instrument", "exposure", "detector"],
    )
    subtractImage = cT.Input(
        doc="Image that will be subtracted from the input image.",
        name="subtracttype",  # default dataset type for subtract image
        storageClass="SPHERExImage",
        dimensions=["instrument", "exposure", "detector"],
    )
    outputImage = cT.Output(
        doc="Output image after subtraction.",
        name='postsubtracttype',  # default dataset type for output image
        storageClass="SPHERExImage",
        dimensions=["instrument", "exposure", "detector"],
    )

    def __init__(self, *, config=None):
        # No connection pruning required; delegate straight to the base class.
        super().__init__(config=config)
class BaseMakeQaTractTablesTaskConnections(pipeBase.PipelineTaskConnections,
                                           defaultTemplates={"coaddName": "deep"},
                                           dimensions=("tract", "skymap")):
    """Connections for collating per-tract QA measurement tables."""

    # Per-patch object catalogs; deferred so columns can be read selectively.
    inputObjCats = cT.Input(
        name="{coaddName}Coadd_obj",
        doc="The coadd catalog from which to collect relevant columns.",
        storageClass="DataFrame",
        dimensions=("tract", "patch", "skymap"),
        multiple=True,
        deferLoad=True,
    )
    inputObjectTable = cT.Input(
        name="objectTable_tract",
        doc=("The objectTable_tract associated with the tract. Only used to ensure the tables persisted "
             "have the same row ordering as the objectTable_tract tables for ease of joint use."),
        storageClass="DataFrame",
        dimensions=("tract", "skymap"),
        deferLoad=True,
    )
    qaTractTable = cT.Output(
        name="qaTractTable_forced",
        doc="The collated catalog of measurements with additional columns for QA added.",
        storageClass="DataFrame",
        dimensions=("tract", "band", "skymap"),
    )
class GridFitConnections(pipeBase.PipelineTaskConnections,
                         dimensions=("instrument", "exposure", "detector")):
    """Connections for fitting a grid model to a spot source catalog."""

    inputCat = cT.Input(
        name='spotSrc',
        doc="Source catalog produced by characterize spot task.",
        storageClass="SourceCatalog",
        dimensions=("instrument", "exposure", "detector"),
    )
    bbox = cT.Input(
        name="postISRCCD.bbox",
        doc="Bounding box for CCD.",
        storageClass="Box2I",
        dimensions=("instrument", "exposure", "detector"),
    )
    gridCalibTable = cT.PrerequisiteInput(
        name="gridCalibration",
        doc="Calibration table for spot grid.",
        storageClass="AstropyTable",
        dimensions=("instrument", "detector"),
        isCalibration=True,
    )
    gridSourceCat = cT.Output(
        name="gridSpotSrc",
        doc="Source catalog produced by grid fit task.",
        storageClass="SourceCatalog",
        dimensions=("instrument", "exposure", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # The calibration table is only needed when grid calibration is on;
        # discard() tolerates the connection already being absent.
        if config.useGridCalibration is not True:
            self.prerequisiteInputs.discard("gridCalibTable")
class MatchApFakesConnections(PipelineTaskConnections,
                              defaultTemplates={"coaddName": "deep",
                                                "fakesType": "fakes_"},
                              dimensions=("tract", "skymap", "instrument",
                                          "visit", "detector")):
    """Connections for matching inserted fakes against associated DiaSources."""

    fakeCat = connTypes.Input(
        doc="Catalog of fake sources to draw inputs from.",
        name="{fakesType}fakeSourceCat",
        storageClass="DataFrame",
        dimensions=("tract", "skymap"))
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    associatedDiaSources = connTypes.Input(
        doc="Optional output storing the DiaSource catalog after matching and "
            "SDMification.",
        name="{fakesType}{coaddName}Diff_assocDiaSrc",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )
    matchedDiaSources = connTypes.Output(
        # Replaced empty doc string with a description of the product.
        doc="Catalog of associated DiaSources matched against the input "
            "catalog of fake sources.",
        name="{fakesType}{coaddName}Diff_matchDiaSrc",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )
class DeblendCoaddSourceSingleConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "band", "skymap"),
                                          defaultTemplates=deblendBaseTemplates
                                          ):
    """Connections for single-band coadd deblending."""
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(doc="Schema of the footprint peak catalogs",
                              name="{inputCoaddName}Coadd_peak_schema",
                              storageClass="PeakCatalog")
    mergedDetections = cT.Input(doc="Detection catalog merged across bands",
                                name="{inputCoaddName}Coadd_mergeDet",
                                storageClass="SourceCatalog",
                                dimensions=("tract", "patch", "skymap"))
    # Single-band: exactly one coadd per quantum (note the "band" dimension
    # on the class itself, unlike the multiband variant).
    coadd = cT.Input(doc="Exposure on which to run deblending",
                     name="{inputCoaddName}Coadd_calexp",
                     storageClass="ExposureF",
                     dimensions=("tract", "patch", "band", "skymap"))
    measureCatalog = cT.Output(
        doc="The output measurement catalog of deblended sources",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")

    # NOTE(review): setDefaults() and `self.singleBandDeblend` are Config
    # concepts, not Connections ones -- this method looks like it belongs on
    # the corresponding Config class; confirm before relying on it.
    def setDefaults(self):
        super().setDefaults()
        self.singleBandDeblend.propagateAllPeaks = True
class ApertureTaskConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("visit", "detector", "band")):
    """Connections for a task measuring custom apertures on an exposure."""

    exposure = connectionTypes.Input(
        doc="Input exposure to make measurements on",
        dimensions=("visit", "detector", "band"),
        storageClass="ExposureF",
        name="calexp",
    )
    # The original declared inputCatalog twice with identical settings; the
    # second assignment silently shadowed the first, so only one is kept.
    inputCatalog = connectionTypes.Input(
        doc="Input catalog with existing measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="src",
    )
    outputCatalog = connectionTypes.Output(
        doc="Aperture measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="customAperture",
    )
    outputSchema = connectionTypes.InitOutput(
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
        name="customAperture_schema",
    )
class ApertureTaskConnections(
    pipeBase.PipelineTaskConnections,
    defaultTemplates={"outputName": "customAperture"},
    dimensions=("visit", "band"),
):
    """Per-visit aperture measurement connections (one entry per detector)."""

    exposures = connectionTypes.Input(
        name="calexp",
        doc="Input exposure to make measurements on",
        storageClass="ExposureF",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    backgrounds = connectionTypes.Input(
        name="calexpBackground",
        doc="Background model for the exposure",
        storageClass="Background",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalogs = connectionTypes.Input(
        name="src",
        doc="Input catalog with existing measurements",
        storageClass="SourceCatalog",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    outputCatalogs = connectionTypes.Output(
        name="{outputName}",
        doc="Aperture measurements",
        storageClass="SourceCatalog",
        dimensions=("visit", "detector", "band"),
        multiple=True,
    )
    outputSchema = connectionTypes.InitOutput(
        name="{outputName}_schema",
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
    )
    areaMasks = connectionTypes.PrerequisiteInput(
        name="ApAreaMask",
        doc="A mask of areas to be ignored",
        storageClass="Mask",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Background inputs are only consumed when local background
        # subtraction is enabled.
        if config.doLocalBackground is False:
            self.inputs.remove("backgrounds")
class MakeWarpConnections(pipeBase.PipelineTaskConnections,
                          dimensions=("tract", "patch", "skymap", "instrument", "visit"),
                          defaultTemplates={"coaddName": "deep"}):
    """Connections for warping calexps onto a skymap patch."""

    calExpList = cT.Input(
        doc="Input exposures to be resampled and optionally PSF-matched onto a SkyMap projection/patch",
        name="calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
        multiple=True,
    )
    backgroundList = cT.Input(
        doc="Input backgrounds to be added back into the calexp if bgSubtracted=False",
        name="calexpBackground",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
        multiple=True,
    )
    skyCorrList = cT.Input(
        doc="Input Sky Correction to be subtracted from the calexp if doApplySkyCorr=True",
        name="skyCorr",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
        multiple=True,
    )
    skyMap = cT.Input(
        doc="Input definition of geometry/bbox and projection/wcs for warped exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    # BUGFIX: the doc arguments below were two-element tuples -- a stray
    # comma between the string fragments defeated implicit concatenation.
    # The commas are removed so doc is a single str.
    direct = cT.Output(
        doc=("Output direct warped exposure (previously called CoaddTempExp), produced by resampling "
             "calexps onto the skyMap patch geometry."),
        name="{coaddName}Coadd_directWarp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "visit", "instrument"),
    )
    psfMatched = cT.Output(
        doc=("Output PSF-Matched warped exposure (previously called CoaddTempExp), produced by resampling "
             "calexps onto the skyMap patch geometry and PSF-matching to a model PSF."),
        name="{coaddName}Coadd_psfMatchedWarp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "visit", "instrument"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Prune connections that the configuration makes unnecessary.
        if config.bgSubtracted:
            self.inputs.remove("backgroundList")
        if not config.doApplySkyCorr:
            self.inputs.remove("skyCorrList")
        if not config.makeDirect:
            self.outputs.remove("direct")
        if not config.makePsfMatched:
            self.outputs.remove("psfMatched")
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates
                                          ):
    """Connections for multiband (scarlet) coadd deblending."""
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(doc="Schema of the footprint peak catalogs",
                              name="{inputCoaddName}Coadd_peak_schema",
                              storageClass="PeakCatalog")
    mergedDetections = cT.Input(doc="Detection catalog merged across bands",
                                name="{inputCoaddName}Coadd_mergeDet",
                                storageClass="SourceCatalog",
                                dimensions=("tract", "patch", "skymap"))
    # One coadd per band, hence multiple=True and the extra "band" dimension.
    coadds = cT.Input(doc="Exposure on which to run deblending",
                      name="{inputCoaddName}Coadd_calexp",
                      storageClass="ExposureF",
                      multiple=True,
                      dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")
    # fluxCatalogs/templateCatalogs are declared but always removed in
    # __init__ below (pending RFC-860); retained for schema compatibility.
    fluxCatalogs = cT.Output(
        doc="Flux weighted catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True)
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedModel",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True)
    deblendedCatalog = cT.Output(
        doc="Catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModelData = cT.Output(
        doc="Multiband scarlet models produced by the deblender",
        name="{outputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Remove unused connections.
        # TODO: deprecate once RFC-860 passes.
        self.outputs -= set(("fluxCatalogs", "templateCatalogs"))
class CpVerifyStatsConnections(pipeBase.PipelineTaskConnections,
                               dimensions={"instrument", "exposure", "detector"},
                               defaultTemplates={}):
    """Connections for gathering cp_verify statistics for one detector."""

    inputExp = cT.Input(
        name="postISRCCD",
        doc="Input exposure to calculate statistics for.",
        storageClass="Exposure",
        dimensions=["instrument", "exposure", "detector"],
    )
    taskMetadata = cT.Input(
        name="isrTask_metadata",
        doc="Input task metadata to extract statistics from.",
        storageClass="PropertySet",
        dimensions=["instrument", "exposure", "detector"],
    )
    inputCatalog = cT.Input(
        name="src",
        doc="Input catalog to calculate statistics for.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "visit", "detector"],
    )
    uncorrectedCatalog = cT.Input(
        name="uncorrectedSrc",
        doc="Input catalog without correction applied.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "visit", "detector"],
    )
    camera = cT.PrerequisiteInput(
        name="camera",
        doc="Input camera.",
        storageClass="Camera",
        dimensions=["instrument", ],
        isCalibration=True,
    )
    outputStats = cT.Output(
        name="detectorStats",
        doc="Output statistics from cp_verify.",
        storageClass="StructuredDataDict",
        dimensions=["instrument", "exposure", "detector"],
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Only request inputs the configured statistics actually need;
        # discard() is safe if the connection is already gone.
        if len(config.metadataStatKeywords) < 1:
            self.inputs.discard('taskMetadata')
        if len(config.catalogStatKeywords) < 1:
            self.inputs.discard('inputCatalog')
            self.inputs.discard('uncorrectedCatalog')
class CalibCombineConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "detector")):
    """Connections for combining per-exposure inputs into one calibration."""
    inputExps = cT.Input(
        name="cpInputs",
        doc="Input pre-processed exposures to combine.",
        storageClass="Exposure",
        dimensions=("instrument", "detector", "exposure"),
        multiple=True,
    )
    inputScales = cT.Input(
        name="cpScales",
        doc="Input scale factors to use.",
        storageClass="StructuredDataDict",
        dimensions=("instrument", ),
        multiple=False,
    )
    outputData = cT.Output(
        name="cpProposal",
        doc="Output combined proposed calibration.",
        storageClass="ExposureF",
        dimensions=("instrument", "detector"),
        isCalibration=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Scale factors are only an input when scaling comes from a list.
        if config and config.exposureScaling != 'InputList':
            self.inputs.discard("inputScales")

        # Extra calibration dimensions (e.g. physical_filter) widen both
        # the output dataset and the quantum dimensions.  Connections are
        # immutable, so the output is rebuilt rather than modified.
        # NOTE(review): nesting reconstructed from collapsed source --
        # the exposureScaling check below is taken to be inside this block;
        # confirm against the original file.
        if config and len(config.calibrationDimensions) != 0:
            newDimensions = tuple(config.calibrationDimensions)
            newOutputData = cT.Output(
                name=self.outputData.name,
                doc=self.outputData.doc,
                storageClass=self.outputData.storageClass,
                dimensions=self.allConnections['outputData'].dimensions + newDimensions,
                isCalibration=True,
            )
            self.dimensions.update(config.calibrationDimensions)
            self.outputData = newOutputData

            # NOTE(review): the rebuilt inputScales changes type from Input
            # to PrerequisiteInput -- presumably intentional so scales need
            # not be produced in the same graph; confirm.
            if config.exposureScaling == 'InputList':
                newInputScales = cT.PrerequisiteInput(
                    name=self.inputScales.name,
                    doc=self.inputScales.doc,
                    storageClass=self.inputScales.storageClass,
                    dimensions=self.allConnections['inputScales'].dimensions + newDimensions)
                self.dimensions.update(config.calibrationDimensions)
                self.inputScales = newInputScales
class CrosstalkColumnConnections(pipeBase.PipelineTaskConnections,
                                 dimensions=("instrument", "exposure", "detector")):
    """Connections for measuring crosstalk ratios and background model terms."""

    inputExp = cT.Input(
        doc="Input post-ISR processed exposure to measure crosstalk from.",
        name="crosstalkInputs",
        storageClass="Exposure",
        dimensions=("instrument", "exposure", "detector"),
        multiple=False,
    )
    rawExp = cT.Input(
        doc="Input raw exposure to measure noise covariance from.",
        name="rawInputs",
        storageClass="Exposure",
        dimensions=("instrument", "exposure", "detector"),
        multiple=False,
    )
    outputRatios = cT.Output(
        doc="Extracted crosstalk pixel ratios.",
        name="crosstalkRatios",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "exposure", "detector"),
    )
    outputFluxes = cT.Output(
        doc="Source pixel fluxes used in ratios.",
        name="crosstalkFluxes",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "exposure", "detector"),
    )
    outputZOffsets = cT.Output(
        doc="Z offset parameters used in background model.",
        name="crosstalkBackgroundZOffsets",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "exposure", "detector"),
    )
    outputYTilts = cT.Output(
        doc="Y tilt parameters used in background model.",
        name="crosstalkBackgroundYTilts",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "exposure", "detector"),
    )
    outputXTilts = cT.Output(
        doc="X tilt parameters used in background model.",
        name="crosstalkBackgroundXTilts",
        storageClass="StructuredDataDict",
        dimensions=("instrument", "exposure", "detector"),
    )

    def __init__(self, *, config=None):
        # No configuration-dependent pruning; base behavior is sufficient.
        super().__init__(config=config)
class SkyCorrectionConnections(pipeBase.PipelineTaskConnections,
                               dimensions=("instrument", "visit")):
    """Connections for computing per-visit sky corrections."""

    # Raw data only provides the exposure->visit linkage for calib lookup.
    rawLinker = cT.Input(
        name="raw",
        doc="Raw data to provide exp-visit linkage to connect calExp inputs to camera/sky calibs.",
        storageClass="ExposureU",
        dimensions=["instrument", "exposure", "detector"],
        multiple=True,
        deferLoad=True,
    )
    calExpArray = cT.Input(
        name="calexp",
        doc="Input exposures to process",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
        multiple=True,
    )
    calBkgArray = cT.Input(
        name="calexpBackground",
        doc="Input background files to use",
        storageClass="Background",
        dimensions=["instrument", "visit", "detector"],
        multiple=True,
    )
    camera = cT.PrerequisiteInput(
        name="camera",
        doc="Input camera to use.",
        storageClass="Camera",
        dimensions=["instrument", "calibration_label"],
    )
    skyCalibs = cT.PrerequisiteInput(
        name="sky",
        doc="Input sky calibrations to use.",
        storageClass="ExposureF",
        dimensions=["instrument", "physical_filter", "detector", "calibration_label"],
        multiple=True,
    )
    calExpCamera = cT.Output(
        name='calexp_camera',
        doc="Output camera image.",
        storageClass="ImageF",
        dimensions=["instrument", "visit"],
    )
    skyCorr = cT.Output(
        name='skyCorr',
        doc="Output sky corrected images.",
        storageClass="Background",
        dimensions=["instrument", "visit", "detector"],
        multiple=True,
    )
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multiband coadd deblending (abstract_filter era)."""

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_mergeDet_schema",
        doc="Input schema to use in the deblend catalog",
        storageClass="SourceCatalog",
    )
    peakSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_peak_schema",
        doc="Schema of the footprint peak catalogs",
        storageClass="PeakCatalog",
    )
    mergedDetections = cT.Input(
        name="{inputCoaddName}Coadd_mergeDet",
        doc="Detection catalog merged across bands",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    # One coadd per filter, hence multiple=True.
    coadds = cT.Input(
        name="{inputCoaddName}Coadd_calexp",
        doc="Exposure on which to run deblending",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_deblendedModel_schema",
        doc="Output of the schema used in deblending task",
        storageClass="SourceCatalog",
    )
    fluxCatalogs = cT.Output(
        name="{outputCoaddName}Coadd_deblendedFlux",
        doc="Flux catalogs produced by multiband deblending, not written "
            "if conserve flux is turned off",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    templateCatalogs = cT.Output(
        name="{outputCoaddName}Coadd_deblendedModel",
        doc="Template catalogs produced by multiband deblending",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Flux catalogs exist only when flux conservation is enabled.
        if not config.multibandDeblend.conserveFlux:
            self.outputs -= set(("fluxCatalogs",))
class CharacterizeImageConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", "visit", "detector")):
    """Connections for image characterization."""

    exposure = cT.Input(
        name="postISRCCD",
        doc="Input exposure data",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    characterized = cT.Output(
        name="icExp",
        doc="Output characterized data.",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    sourceCat = cT.Output(
        name="icSrc",
        doc="Output source catalog.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "visit", "detector"],
    )
    backgroundModel = cT.Output(
        name="icExpBackground",
        doc="Output background model.",
        storageClass="Background",
        dimensions=["instrument", "visit", "detector"],
    )
    outputSchema = cT.InitOutput(
        name="icSrc_schema",
        doc="Schema of the catalog produced by CharacterizeImage",
        storageClass="SourceCatalog",
    )
class ApdbMetricConnections(
        MetricConnections,
        dimensions={"instrument"},
):
    """An abstract connections class defining a database input.

    Notes
    -----
    ``ApdbMetricConnections`` defines the following dataset templates:
        ``package``
            Name of the metric's namespace. By
            :ref:`verify_metrics <verify-metrics-package>` convention, this is
            the name of the package the metric is most closely associated
            with.
        ``metric``
            Name of the metric, excluding any namespace.
    """

    dbInfo = connectionTypes.Input(
        name="apdb_marker",
        doc="The dataset from which an APDB instance can be constructed by "
            "`dbLoader`. By default this is assumed to be a marker produced "
            "by AP processing.",
        storageClass="Config",
        multiple=True,
        dimensions={"instrument", "visit", "detector"},
    )
    # Overrides MetricConnections.measurement, which is detector-level;
    # this metric is aggregated to instrument granularity.
    measurement = connectionTypes.Output(
        name="metricvalue_{package}_{metric}",
        doc="The metric value computed by this task.",
        storageClass="MetricValue",
        dimensions={"instrument"},
    )
class SingleMetadataMetricConnections(
        MetricConnections,
        dimensions={"instrument", "exposure", "detector"},
        defaultTemplates={"labelName": "", "package": None, "metric": None}):
    """An abstract connections class defining a metadata input.

    Notes
    -----
    ``SingleMetadataMetricConnections`` defines the following dataset
    templates:
        ``package``
            Name of the metric's namespace. By
            :ref:`verify_metrics <verify-metrics-package>` convention, this is
            the name of the package the metric is most closely associated
            with.
        ``metric``
            Name of the metric, excluding any namespace.
        ``labelName``
            Pipeline label of the `~lsst.pipe.base.PipelineTask` or name of
            the `~lsst.pipe.base.CmdLineTask` whose metadata are being read.
    """

    metadata = connectionTypes.Input(
        name="{labelName}_metadata",
        doc="The target top-level task's metadata. The name must be set to "
            "the metadata's butler type, such as 'processCcd_metadata'.",
        storageClass="PropertySet",
        # Fixed: dimension names were capitalized ({"Instrument", ...}),
        # inconsistent with the lowercase class-level dimensions above.
        dimensions={"instrument", "exposure", "detector"},
        multiple=False,
    )
class MergeMeasurementsConnections(PipelineTaskConnections,
                                   dimensions=("skymap", "tract", "patch"),
                                   defaultTemplates={"inputCoaddName": "deep",
                                                     "outputCoaddName": "deep"}):
    """Connections for merging per-band measurement catalogs."""

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_meas_schema",
        doc="Schema for the output merged measurement catalog.",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_ref_schema",
        doc="Schema for the output merged measurement catalog.",
        storageClass="SourceCatalog",
    )
    # One measurement catalog per band is merged into the reference catalog.
    catalogs = cT.Input(
        name="{inputCoaddName}Coadd_meas",
        doc="Input catalogs to merge.",
        storageClass="SourceCatalog",
        dimensions=["band", "skymap", "tract", "patch"],
        multiple=True,
    )
    mergedCatalog = cT.Output(
        name="{outputCoaddName}Coadd_ref",
        doc="Output merged catalog.",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
class PhotonTransferCurveSolveConnections(pipeBase.PipelineTaskConnections,
                                          dimensions=("instrument", "detector")
                                          ):
    """Connections for solving the photon transfer curve per detector."""
    # One covariance dataset per flat-pair exposure, combined into one PTC.
    inputCovariances = cT.Input(
        name="ptcCovariances",
        doc="Tuple with measured covariances from flats.",
        storageClass="PhotonTransferCurveDataset",
        dimensions=("instrument", "exposure", "detector"),
        multiple=True,
    )
    camera = cT.PrerequisiteInput(
        name="camera",
        doc="Camera the input data comes from.",
        storageClass="Camera",
        dimensions=("instrument", ),
        isCalibration=True,
        lookupFunction=lookupStaticCalibration,
    )
    # NOTE(review): "ptcDatsetProposal" looks like a typo for
    # "ptcDatasetProposal", but it is a butler dataset type name -- renaming
    # it would break existing repositories and pipeline overrides, so it is
    # left as-is; confirm before changing.
    outputPtcDataset = cT.Output(
        name="ptcDatsetProposal",
        doc="Output proposed ptc dataset.",
        storageClass="PhotonTransferCurveDataset",
        dimensions=("instrument", "detector"),
        multiple=False,
        isCalibration=True,
    )
class PatchConnections(PipelineTaskConnections,
                       dimensions={"skymap", "tract"}):
    """Per-tract test connections with an optional prerequisite input."""

    a = connectionTypes.Input(
        name="PatchA",
        dimensions={"skymap", "tract", "patch"},
        storageClass="StructuredData",
        multiple=True,
    )
    b = connectionTypes.PrerequisiteInput(
        name="PatchB",
        dimensions={"skymap", "tract"},
        storageClass="StructuredData",
        multiple=False,
    )
    out = connectionTypes.Output(
        name="PatchOut",
        dimensions={"skymap", "tract", "patch"},
        storageClass="StructuredData",
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # The prerequisite input is optional, controlled by configuration.
        if not config.doUseB:
            self.prerequisiteInputs.remove("b")
class NoDimensionsTestConnections(PipelineTaskConnections, dimensions=set()):
    """Dimensionless test connections: one dict input, one dict output."""

    input = connectionTypes.Input(
        name="input",
        doc="some dict-y input data for testing",
        storageClass="StructuredDataDict",
    )
    output = connectionTypes.Output(
        name="output",
        doc="some dict-y output data for testing",
        storageClass="StructuredDataDict",
    )