예제 #1
0
class CharacterizeImageConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", "visit", "detector")):
    """Butler connections for image characterization.

    Takes a post-ISR exposure and produces the characterized exposure,
    its source catalog, and the fitted background model, plus the output
    catalog schema as an init-output.
    """

    exposure = cT.Input(
        name="postISRCCD",
        doc="Input exposure data",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    characterized = cT.Output(
        name="icExp",
        doc="Output characterized data.",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    sourceCat = cT.Output(
        name="icSrc",
        doc="Output source catalog.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "visit", "detector"],
    )
    backgroundModel = cT.Output(
        name="icExpBackground",
        doc="Output background model.",
        storageClass="Background",
        dimensions=["instrument", "visit", "detector"],
    )
    outputSchema = cT.InitOutput(
        name="icSrc_schema",
        doc="Schema of the catalog produced by CharacterizeImage",
        storageClass="SourceCatalog",
    )
예제 #2
0
class ApertureTaskConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("visit", "detector", "band")):
    """Butler connections for the aperture-measurement task.

    Reads a calibrated exposure and its source catalog, and writes the
    aperture-measurement catalog plus its schema as an init-output.
    """

    exposure = connectionTypes.Input(
        doc="Input exposure to make measurements on",
        dimensions=("visit", "detector", "band"),
        storageClass="ExposureF",
        name="calexp",
    )
    # NOTE: the original declared ``inputCatalog`` twice with identical
    # contents; the second class-body assignment silently shadowed the
    # first, so the duplicate declaration has been removed.
    inputCatalog = connectionTypes.Input(
        doc="Input catalog with existing measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="src",
    )
    outputCatalog = connectionTypes.Output(
        doc="Aperture measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="customAperture",
    )
    outputSchema = connectionTypes.InitOutput(
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
        name="customAperture_schema",
    )
예제 #3
0
class MergeMeasurementsConnections(PipelineTaskConnections,
                                   dimensions=("skymap", "tract", "patch"),
                                   defaultTemplates={"inputCoaddName": "deep",
                                                     "outputCoaddName": "deep"}):
    """Connections for merging per-band measurement catalogs into a single
    reference catalog per patch.
    """

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_meas_schema",
        doc="Schema for the output merged measurement catalog.",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_ref_schema",
        doc="Schema for the output merged measurement catalog.",
        storageClass="SourceCatalog",
    )
    catalogs = cT.Input(
        name="{inputCoaddName}Coadd_meas",
        doc="Input catalogs to merge.",
        storageClass="SourceCatalog",
        multiple=True,
        dimensions=["band", "skymap", "tract", "patch"],
    )
    mergedCatalog = cT.Output(
        name="{outputCoaddName}Coadd_ref",
        doc="Output merged catalog.",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
예제 #4
0
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multi-band deblending of coadd detections."""

    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog",
    )
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog",
    )
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog",
    )
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True,
    )
class DeblendCoaddSourceSingleConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "band",
                                                      "skymap"),
                                          defaultTemplates=deblendBaseTemplates
                                          ):
    """Connections for single-band deblending of one coadd exposure."""

    # Schema used to initialize the deblend catalog.
    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog")
    peakSchema = cT.InitInput(doc="Schema of the footprint peak catalogs",
                              name="{inputCoaddName}Coadd_peak_schema",
                              storageClass="PeakCatalog")
    mergedDetections = cT.Input(doc="Detection catalog merged across bands",
                                name="{inputCoaddName}Coadd_mergeDet",
                                storageClass="SourceCatalog",
                                dimensions=("tract", "patch", "skymap"))
    # Single coadd (one band) to deblend, unlike the ``multiple=True``
    # ``coadds`` input of the multi-band variant above.
    coadd = cT.Input(doc="Exposure on which to run deblending",
                     name="{inputCoaddName}Coadd_calexp",
                     storageClass="ExposureF",
                     dimensions=("tract", "patch", "band", "skymap"))
    measureCatalog = cT.Output(
        doc="The output measurement catalog of deblended sources",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"))
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog")

    def setDefaults(self):
        # NOTE(review): ``setDefaults`` is conventionally defined on a task's
        # Config class rather than its Connections class, and
        # ``self.singleBandDeblend`` looks like a config sub-field — confirm
        # this method belongs here and that ``super()`` actually provides a
        # ``setDefaults`` to call.
        super().setDefaults()
        self.singleBandDeblend.propagateAllPeaks = True
예제 #6
0
class ApertureTaskConnections(
    pipeBase.PipelineTaskConnections,
    defaultTemplates={"outputName": "customAperture"},
    dimensions=("visit", "band"),
):
    """Per-visit aperture-measurement connections with deferred loading.

    Per-detector datasets are declared ``multiple`` because the quantum
    spans a whole visit; the background input is dropped in ``__init__``
    when local-background subtraction is disabled.
    """

    exposures = connectionTypes.Input(
        name="calexp",
        doc="Input exposure to make measurements on",
        storageClass="ExposureF",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    backgrounds = connectionTypes.Input(
        name="calexpBackground",
        doc="Background model for the exposure",
        storageClass="Background",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalogs = connectionTypes.Input(
        name="src",
        doc="Input catalog with existing measurements",
        storageClass="SourceCatalog",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    outputCatalogs = connectionTypes.Output(
        name="{outputName}",
        doc="Aperture measurements",
        storageClass="SourceCatalog",
        dimensions=("visit", "detector", "band"),
        multiple=True,
    )
    outputSchema = connectionTypes.InitOutput(
        name="{outputName}_schema",
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
    )
    areaMasks = connectionTypes.PrerequisiteInput(
        name="ApAreaMask",
        doc="A mask of areas to be ignored",
        storageClass="Mask",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop the background connection entirely when the task is
        # configured not to do local background subtraction.
        if config.doLocalBackground is False:
            self.inputs.remove("backgrounds")
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multi-band deblending with scarlet model outputs.

    The legacy flux and template catalog outputs are removed in
    ``__init__``; only the deblended catalog and scarlet model data are
    actually produced.
    """

    inputSchema = cT.InitInput(
        doc="Input schema to use in the deblend catalog",
        name="{inputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog",
    )
    peakSchema = cT.InitInput(
        doc="Schema of the footprint peak catalogs",
        name="{inputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog",
    )
    mergedDetections = cT.Input(
        doc="Detection catalog merged across bands",
        name="{inputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposure on which to run deblending",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSchema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{outputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog",
    )
    fluxCatalogs = cT.Output(
        doc="Flux weighted catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True,
    )
    templateCatalogs = cT.Output(
        doc="Template catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedModel",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        multiple=True,
    )
    deblendedCatalog = cT.Output(
        doc="Catalogs produced by multiband deblending",
        name="{outputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModelData = cT.Output(
        doc="Multiband scarlet models produced by the deblender",
        name="{outputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Remove unused connections.
        # TODO: deprecate once RFC-860 passes.
        self.outputs -= {"fluxCatalogs", "templateCatalogs"}
예제 #8
0
class MergeDetectionsConnections(PipelineTaskConnections,
                                 dimensions=("tract", "patch", "skymap"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    """Connections for merging per-band detection catalogs on a patch."""

    schema = cT.InitInput(
        name="{inputCoaddName}Coadd_det_schema",
        doc="Schema of the input detection catalog",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_mergeDet_schema",
        doc="Schema of the merged detection catalog",
        storageClass="SourceCatalog",
    )
    outputPeakSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_peak_schema",
        doc="Output schema of the Footprint peak catalog",
        storageClass="PeakCatalog",
    )
    catalogs = cT.Input(
        name="{inputCoaddName}Coadd_det",
        doc="Detection Catalogs to be merged",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
    )
    # The dataset type name for the sky map is a library-defined constant.
    skyMap = cT.Input(
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        doc="SkyMap to be used in merging",
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    outputCatalog = cT.Output(
        name="{outputCoaddName}Coadd_mergeDet",
        doc="Merged Detection catalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
예제 #9
0
class Dummy1Connections(PipelineTaskConnections, dimensions=("A", "B")):
    """Minimal test connections: one input, one output, one init-output."""

    initOutput = cT.InitOutput(
        name="Dummy1InitOutput",
        storageClass="ExposureF",
        doc="n/a",
    )
    input = cT.Input(
        name="Dummy1Input",
        storageClass="ExposureF",
        doc="n/a",
        dimensions=("A", "B"),
    )
    output = cT.Output(
        name="Dummy1Output",
        storageClass="ExposureF",
        doc="n/a",
        dimensions=("A", "B"),
    )
예제 #10
0
class DeblendCoaddSourcesMultiConnections(PipelineTaskConnections,
                                          dimensions=("tract", "patch", "skymap"),
                                          defaultTemplates=deblendBaseTemplates):
    """Connections for multi-band deblending producing model catalogs.

    The flux catalog output is removed in ``__init__`` when flux
    conservation is disabled in the deblender configuration.
    """

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_mergeDet_schema",
        doc="Input schema to use in the deblend catalog",
        storageClass="SourceCatalog",
    )
    peakSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_peak_schema",
        doc="Schema of the footprint peak catalogs",
        storageClass="PeakCatalog",
    )
    mergedDetections = cT.Input(
        name="{inputCoaddName}Coadd_mergeDet",
        doc="Detection catalog merged across bands",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    coadds = cT.Input(
        name="{inputCoaddName}Coadd_calexp",
        doc="Exposure on which to run deblending",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_deblendedModel_schema",
        doc="Output of the schema used in deblending task",
        storageClass="SourceCatalog",
    )
    fluxCatalogs = cT.Output(
        name="{outputCoaddName}Coadd_deblendedFlux",
        doc="Flux catalogs produced by multiband deblending, not written "
            "if conserve flux is turned off",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    templateCatalogs = cT.Output(
        name="{outputCoaddName}Coadd_deblendedModel",
        doc="Template catalogs produced by multiband deblending",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Flux catalogs only exist when the deblender conserves flux.
        if not config.multibandDeblend.conserveFlux:
            self.outputs -= {"fluxCatalogs"}
예제 #11
0
class PatchConnections(PipelineTaskConnections,
                       dimensions={"skymap", "tract"}):
    """Test connections defined per tract with per-patch inputs/outputs.

    The prerequisite input ``b`` is optional and removed in ``__init__``
    when ``config.doUseB`` is off.
    """

    a = connectionTypes.Input(
        name="PatchA",
        dimensions={"skymap", "tract", "patch"},
        storageClass="StructuredData",
        multiple=True,
    )
    b = connectionTypes.PrerequisiteInput(
        name="PatchB",
        dimensions={"skymap", "tract"},
        storageClass="StructuredData",
        multiple=False,
    )
    initOutA = connectionTypes.InitOutput(
        name="PatchInitOutA",
        storageClass="StructuredData",
        multiple=False,
    )
    initOutB = connectionTypes.InitOutput(
        name="PatchInitOutB",
        storageClass="StructuredData",
        multiple=False,
    )
    out = connectionTypes.Output(
        name="PatchOut",
        dimensions={"skymap", "tract", "patch"},
        storageClass="StructuredData",
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop the optional prerequisite input when disabled in config.
        if not config.doUseB:
            self.prerequisiteInputs.remove("b")
예제 #12
0
class ApertureTaskConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("visit", "detector", "band")):
    """Aperture-measurement connections with an optional background input.

    The ``background`` connection is removed in ``__init__`` when local
    background subtraction is disabled in the task configuration.
    """

    exposure = connectionTypes.Input(
        doc="Input exposure to make measurements on",
        dimensions=("visit", "detector", "band"),
        storageClass="ExposureF",
        name="calexp",
    )
    # NOTE: ``inputCatalog`` was declared twice with identical contents in
    # the original; the later class-body assignment silently shadowed the
    # earlier one, so the duplicate declaration has been removed.
    inputCatalog = connectionTypes.Input(
        doc="Input catalog with existing measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="src",
    )
    background = connectionTypes.Input(
        doc="Background model for the exposure",
        storageClass="Background",
        name="calexpBackground",
        dimensions=("visit", "detector", "band"),
    )
    outputCatalog = connectionTypes.Output(
        doc="Aperture measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="customAperture",
    )
    outputSchema = connectionTypes.InitOutput(
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
        name="customAperture_schema",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop the background connection when local-background subtraction
        # is disabled.
        if config.doLocalBackground is False:
            self.inputs.remove("background")
예제 #13
0
class CharacterizeImageConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", "visit", "detector")):
    """Image-characterization connections whose input is per-exposure while
    the quantum is per-visit; ``adjustQuantum`` converts the framework's
    scalar-connection error into a task-specific message when a visit maps
    to more than one exposure.
    """

    exposure = cT.Input(
        name="postISRCCD",
        doc="Input exposure data",
        storageClass="ExposureF",
        dimensions=["instrument", "exposure", "detector"],
    )
    characterized = cT.Output(
        name="icExp",
        doc="Output characterized data.",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    sourceCat = cT.Output(
        name="icSrc",
        doc="Output source catalog.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "visit", "detector"],
    )
    backgroundModel = cT.Output(
        name="icExpBackground",
        doc="Output background model.",
        storageClass="Background",
        dimensions=["instrument", "visit", "detector"],
    )
    outputSchema = cT.InitOutput(
        name="icSrc_schema",
        doc="Schema of the catalog produced by CharacterizeImage",
        storageClass="SourceCatalog",
    )

    def adjustQuantum(self, datasetRefMap: pipeBase.InputQuantizedConnection):
        # Docstring inherited from PipelineTaskConnections
        try:
            return super().adjustQuantum(datasetRefMap)
        except pipeBase.ScalarError as err:
            # Re-raise with a task-specific explanation, keeping the
            # original error chained as the cause.
            raise pipeBase.ScalarError(
                f"CharacterizeImageTask can at present only be run on visits that are associated with "
                f"exactly one exposure.  Either this is not a valid exposure for this pipeline, or the "
                f"snap-combination step you probably want hasn't been configured to run between ISR and "
                f"this task (as of this writing, that would be because it hasn't been implemented yet)."
            ) from err
예제 #14
0
class CharacterizeSpotsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", "exposure", "detector")):
    """Connections for spot characterization on a single exposure."""

    exposure = cT.Input(
        name="postISRCCD",
        doc="Input exposure data",
        storageClass="Exposure",
        dimensions=["instrument", "exposure", "detector"],
    )
    sourceCat = cT.Output(
        name="spotSrc",
        doc="Output source catalog.",
        storageClass="SourceCatalog",
        dimensions=["instrument", "exposure", "detector"],
    )
    outputSchema = cT.InitOutput(
        name="spotSrc_schema",
        doc="Schema of the catalog produced by CharacterizeSpots",
        storageClass="SourceCatalog",
    )
예제 #15
0
class VisitConnections(PipelineTaskConnections,
                       dimensions={"instrument", "visit"}):
    """Test connections with per-visit inputs/outputs and an optional
    init-input that is removed when ``config.doUseInitIn`` is off.
    """

    initIn = connectionTypes.InitInput(
        name="VisitInitIn",
        multiple=False,
        storageClass="StructuredData",
    )
    a = connectionTypes.Input(
        name="VisitA",
        multiple=False,
        storageClass="StructuredData",
        dimensions={"instrument", "visit"},
    )
    b = connectionTypes.Input(
        name="VisitB",
        multiple=False,
        storageClass="StructuredData",
        dimensions={"instrument", "visit"},
    )
    initOut = connectionTypes.InitOutput(
        name="VisitInitOut",
        multiple=True,
        storageClass="StructuredData",
    )
    outA = connectionTypes.Output(
        name="VisitOutA",
        multiple=False,
        storageClass="StructuredData",
        dimensions={"instrument", "visit"},
    )
    outB = connectionTypes.Output(
        name="VisitOutB",
        multiple=False,
        storageClass="StructuredData",
        dimensions={"instrument", "visit"},
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # The init-input is optional; drop it when disabled in config.
        if not config.doUseInitIn:
            self.initInputs.remove("initIn")
예제 #16
0
class ForcedPhotImageConnections(PipelineTaskConnections,
                                 dimensions=("abstract_filter", "skymap", "tract", "patch"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    """Connections for forced photometry on a coadd patch."""

    inputSchema = cT.InitInput(
        name="{inputCoaddName}Coadd_ref_schema",
        doc="Schema for the input measurement catalogs.",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_forced_src_schema",
        doc="Schema for the output forced measurement catalogs.",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        name="{inputCoaddName}Coadd",
        doc="Input exposure to perform photometry on.",
        storageClass="ExposureF",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
    refCat = cT.Input(
        name="{inputCoaddName}Coadd_ref",
        doc="Catalog of shapes and positions at which to force photometry.",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
    # WCS is read as a component of the coadd dataset ("." component syntax).
    refWcs = cT.Input(
        name="{inputCoaddName}Coadd.wcs",
        doc="Reference world coordinate system.",
        storageClass="Wcs",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
    measCat = cT.Output(
        name="{outputCoaddName}Coadd_forced_src",
        doc="Output forced photometry catalog.",
        storageClass="SourceCatalog",
        dimensions=["abstract_filter", "skymap", "tract", "patch"],
    )
예제 #17
0
class CalibrateConnections(pipeBase.PipelineTaskConnections,
                           dimensions=("instrument", "visit", "detector"),
                           defaultTemplates={}):
    """Connections for the calibration task.

    Consumes the characterized exposure, its background and source
    catalog, plus astrometric/photometric reference catalogs, and
    produces the calibrated exposure, source catalog, background, and
    (optionally) match catalogs.  Optional connections are pruned in
    ``__init__`` from the task configuration.
    """

    icSourceSchema = cT.InitInput(
        name="icSrc_schema",
        doc="Schema produced by characterize image task, used to initialize this task",
        storageClass="SourceCatalog",
    )
    outputSchema = cT.InitOutput(
        name="src_schema",
        doc="Schema after CalibrateTask has been initialized",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        name="icExp",
        doc="Input image to calibrate",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    background = cT.Input(
        name="icExpBackground",
        doc="Backgrounds determined by characterize task",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
    )
    icSourceCat = cT.Input(
        name="icSrc",
        doc="Source catalog created by characterize task",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    astromRefCat = cT.PrerequisiteInput(
        name="cal_ref_cat",
        doc="Reference catalog to use for astrometry",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    photoRefCat = cT.PrerequisiteInput(
        name="cal_ref_cat",
        doc="Reference catalog to use for photometric calibration",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    outputExposure = cT.Output(
        name="calexp",
        doc="Exposure after running calibration task",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    outputCat = cT.Output(
        name="src",
        doc="Source catalog produced in calibrate task",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector"),
    )
    outputBackground = cT.Output(
        name="calexpBackground",
        doc="Background models estimated in calibration task",
        storageClass="Background",
        dimensions=("instrument", "visit", "detector"),
    )
    matches = cT.Output(
        name="srcMatch",
        doc="Source/refObj matches from the astrometry solver",
        storageClass="Catalog",
        dimensions=("instrument", "visit", "detector"),
    )
    matchesDenormalized = cT.Output(
        name="srcMatchFull",
        doc="Denormalized matches from astrometry solver",
        storageClass="Catalog",
        dimensions=("instrument", "visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Reference catalogs are only needed for the calibration steps
        # that are actually enabled.
        if config.doAstrometry is False:
            self.prerequisiteInputs.remove("astromRefCat")
        if config.doPhotoCal is False:
            self.prerequisiteInputs.remove("photoRefCat")
        # Match outputs require both astrometry and the corresponding
        # write flag.
        if config.doWriteMatches is False or config.doAstrometry is False:
            self.outputs.remove("matches")
        if config.doWriteMatchesDenormalized is False or config.doAstrometry is False:
            self.outputs.remove("matchesDenormalized")
예제 #18
0
class MultibandFitConnections(
        pipeBase.PipelineTaskConnections,
        dimensions=("tract", "patch", "skymap"),
        defaultTemplates=multibandFitBaseTemplates,
):
    """Connections for multi-band fitting on a coadd patch.

    ``adjustQuantum`` filters the per-band inputs down to the bands the
    configured fitting subtask actually requires.
    """

    cat_ref = cT.Input(
        doc="Reference multiband source catalog",
        name="{name_input_coadd}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cats_meas = cT.Input(
        doc="Deblended single-band source catalogs",
        name="{name_input_coadd}Coadd_meas",
        storageClass="SourceCatalog",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposures on which to run fits",
        name="{name_input_coadd}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    cat_output = cT.Output(
        doc="Measurement multi-band catalog",
        name="{name_output_coadd}Coadd_{name_output_cat}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cat_ref_schema = cT.InitInput(
        doc="Schema associated with a ref source catalog",
        storageClass="SourceCatalog",
        name="{name_input_coadd}Coadd_ref_schema",
    )
    cat_output_schema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{name_output_coadd}Coadd_{name_output_cat}_schema",
        storageClass="SourceCatalog")

    def adjustQuantum(self, inputs, outputs, label, data_id):
        """Validates the `lsst.daf.butler.DatasetRef` bands against the
        subtask's list of bands to fit and drops unnecessary bands.

        Parameters
        ----------
        inputs : `dict`
            Dictionary whose keys are an input (regular or prerequisite)
            connection name and whose values are a tuple of the connection
            instance and a collection of associated `DatasetRef` objects.
            The exact type of the nested collections is unspecified; it can be
            assumed to be multi-pass iterable and support `len` and ``in``, but
            it should not be mutated in place.  In contrast, the outer
            dictionaries are guaranteed to be temporary copies that are true
            `dict` instances, and hence may be modified and even returned; this
            is especially useful for delegating to `super` (see notes below).
        outputs : `Mapping`
            Mapping of output datasets, with the same structure as ``inputs``.
        label : `str`
            Label for this task in the pipeline (should be used in all
            diagnostic messages).
        data_id : `lsst.daf.butler.DataCoordinate`
            Data ID for this quantum in the pipeline (should be used in all
            diagnostic messages).

        Returns
        -------
        adjusted_inputs : `Mapping`
            Mapping of the same form as ``inputs`` with updated containers of
            input `DatasetRef` objects.  All inputs involving the 'band'
            dimension are adjusted to put them in consistent order and remove
            unneeded bands.
        adjusted_outputs : `Mapping`
            Mapping of updated output datasets; always empty for this task.

        Raises
        ------
        lsst.pipe.base.NoWorkFound
            Raised if there are not enough of the right bands to run the task
            on this quantum.
        """
        # Check which bands are going to be fit
        bands_fit, bands_read_only = self.config.get_band_sets()
        bands_needed = bands_fit.union(bands_read_only)

        # Only connections with a 'band' dimension are adjusted; all other
        # inputs are left untouched (they never enter adjusted_inputs).
        adjusted_inputs = {}
        for connection_name, (connection, dataset_refs) in inputs.items():
            # Datasets without bands in their dimensions should be fine
            if 'band' in connection.dimensions:
                datasets_by_band = {
                    dref.dataId['band']: dref
                    for dref in dataset_refs
                }
                # Every required band must be present in this quantum.
                if not bands_needed.issubset(datasets_by_band.keys()):
                    raise pipeBase.NoWorkFound(
                        f'DatasetRefs={dataset_refs} have data with bands in the'
                        f' set={set(datasets_by_band.keys())},'
                        f' which is not a superset of the required bands={bands_needed} defined by'
                        f' {self.config.__class__}.fit_multiband='
                        f'{self.config.fit_multiband._value.__class__}\'s attributes'
                        f' bands_fit={bands_fit} and bands_read_only()={bands_read_only}.'
                        f' Add the required bands={bands_needed.difference(datasets_by_band.keys())}.'
                    )
                # Adjust all datasets with band dimensions to include just
                # the needed bands, in consistent order.
                adjusted_inputs[connection_name] = (connection, [
                    datasets_by_band[band] for band in bands_needed
                ])

        # Delegate to super for more checks.
        inputs.update(adjusted_inputs)
        super().adjustQuantum(inputs, outputs, label, data_id)
        return adjusted_inputs, {}
예제 #19
0
class MultibandFitConnections(
        pipeBase.PipelineTaskConnections,
        dimensions=("tract", "patch", "skymap"),
        defaultTemplates=multibandFitBaseTemplates,
):
    """Connections for a multiband source-fitting task on coadds.

    Per-band inputs (``cats_meas``, ``coadds``) are validated and trimmed
    in `adjustQuantum` against the set of bands the configured fit subtask
    requires.
    """

    cat_ref = cT.Input(
        doc="Reference multiband source catalog",
        name="{name_input_coadd}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cats_meas = cT.Input(
        doc="Deblended single-band source catalogs",
        name="{name_input_coadd}Coadd_meas",
        storageClass="SourceCatalog",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    coadds = cT.Input(
        doc="Exposures on which to run fits",
        name="{name_input_coadd}Coadd_calexp",
        storageClass="ExposureF",
        multiple=True,
        dimensions=("tract", "patch", "band", "skymap"),
    )
    cat_output = cT.Output(
        doc="Measurement multi-band catalog",
        name="{name_output_coadd}Coadd_{name_output_cat}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    cat_ref_schema = cT.InitInput(
        doc="Schema associated with a ref source catalog",
        storageClass="SourceCatalog",
        name="{name_input_coadd}Coadd_ref_schema",
    )
    cat_output_schema = cT.InitOutput(
        doc="Output of the schema used in deblending task",
        name="{name_output_coadd}Coadd_{name_output_cat}_schema",
        storageClass="SourceCatalog")

    def adjustQuantum(self, datasetRefMap):
        """Validate the `lsst.daf.butler.DatasetRef` bands against the
        subtask's list of bands to fit and drop unnecessary bands.

        Parameters
        ----------
        datasetRefMap : `NamedKeyDict`
            Mapping from dataset type to a `set` of
            `lsst.daf.butler.DatasetRef` objects.

        Returns
        -------
        datasetRefMap : `NamedKeyDict`
            Modified mapping of input with possibly adjusted
            `lsst.daf.butler.DatasetRef` objects.

        Raises
        ------
        ValueError
            Raised if any of the per-band datasets have an inconsistent band
            set, or if the band set to fit is not a subset of the data bands.
        """
        datasetRefMap = super().adjustQuantum(datasetRefMap)
        # Determine which bands the configured fit subtask needs.
        bands_fit, bands_read_only = self.config.get_band_sets()
        bands_needed = bands_fit.union(bands_read_only)

        # Band set seen on the first per-band connection; every other
        # per-band connection must match it exactly.
        bands_data = None
        # Bands present in the data but not needed by the fit; these refs
        # are dropped from every per-band connection below.
        bands_extra = set()

        for type_d, ref_d in datasetRefMap.items():
            # Datasets without bands in their dimensions should be fine
            if 'band' in type_d.dimensions:
                bands_set = {dref.dataId['band'] for dref in ref_d}
                if bands_data is None:
                    bands_data = bands_set
                    if bands_needed != bands_data:
                        if not bands_needed.issubset(bands_data):
                            # The data bands must contain all needed bands;
                            # message worded as in the newer adjustQuantum
                            # implementation ("superset", not "subset").
                            raise ValueError(
                                f'Datarefs={ref_d} have data with bands in the set={bands_set},'
                                f' which is not a superset of the required bands={bands_needed} defined by '
                                f'{self.config.__class__}.fit_multiband='
                                f'{self.config.fit_multiband._value.__class__}\'s attributes'
                                f' bands_fit={bands_fit} and bands_read_only()={bands_read_only}.'
                                f' Add the required bands={bands_needed.difference(bands_data)}.'
                            )
                        else:
                            bands_extra = bands_data.difference(bands_needed)
                elif bands_set != bands_data:
                    raise ValueError(
                        f'Datarefs={ref_d} have data with bands in the set={bands_set}'
                        f' which differs from the previous={bands_data}); bandsets must be identical.'
                    )
                if bands_extra:
                    # Collect first, then remove: mutating ref_d while
                    # iterating it skips elements (list) or raises
                    # RuntimeError (set).
                    to_drop = [dref for dref in ref_d
                               if dref.dataId['band'] in bands_extra]
                    for dref in to_drop:
                        ref_d.remove(dref)
        return datasetRefMap
예제 #20
0
class ApertureTaskConnections(
        pipeBase.PipelineTaskConnections,
        defaultTemplates={"outputName": "customAperture"},
        dimensions=("visit", "band"),
):
    """Connections for an aperture-measurement task that gathers all
    detectors of a visit into a single quantum.

    `adjustQuantum` restricts every ``multiple=True`` connection to the
    data IDs common to all inputs, so per-detector refs stay aligned.
    """

    exposures = connectionTypes.Input(
        doc="Input exposure to make measurements on",
        dimensions=("visit", "detector", "band"),
        storageClass="ExposureF",
        name="calexp",
        multiple=True,
        deferLoad=True,
    )
    backgrounds = connectionTypes.Input(
        doc="Background model for the exposure",
        storageClass="Background",
        name="calexpBackground",
        dimensions=("visit", "detector", "band"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalogs = connectionTypes.Input(
        doc="Input catalog with existing measurements",
        dimensions=(
            "visit",
            "detector",
            "band",
        ),
        storageClass="SourceCatalog",
        name="src",
        multiple=True,
        deferLoad=True,
    )
    outputCatalogs = connectionTypes.Output(
        doc="Aperture measurements",
        dimensions=("visit", "detector", "band"),
        storageClass="SourceCatalog",
        name="{outputName}",
        multiple=True,
    )
    outputSchema = connectionTypes.InitOutput(
        doc="Schema created in Aperture PipelineTask",
        storageClass="SourceCatalog",
        name="{outputName}_schema",
    )
    areaMasks = connectionTypes.PrerequisiteInput(
        doc="A mask of areas to be ignored",
        storageClass="Mask",
        dimensions=("visit", "detector", "band"),
        name="ApAreaMask",
        multiple=True,
        deferLoad=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Drop the background connection entirely when local background
        # subtraction is disabled; adjustQuantum must tolerate its absence.
        if config.doLocalBackground is False:
            self.inputs.remove("backgrounds")

    def adjustQuantum(self, inputs, outputs, label, data_id):
        """Trim all ``multiple=True`` connections to their common data IDs.

        Parameters
        ----------
        inputs : `dict`
            Mapping of input connection name to a
            ``(connection, [DatasetRef, ...])`` tuple.
        outputs : `dict`
            Same structure as ``inputs``, for output connections.
        label : `str`
            Label of the task in the pipeline.
        data_id : `lsst.daf.butler.DataCoordinate`
            Data ID of this quantum.

        Returns
        -------
        adjusted : `tuple` [`dict`, `dict`]
            The adjusted inputs and outputs, each restricted (in a
            consistent order) to the data IDs present in every input.
        """
        # Find the data IDs common to all multiple=True inputs.
        # "backgrounds" is removed in __init__ when doLocalBackground is
        # False, so only consider the connections actually present here;
        # indexing inputs["backgrounds"] unconditionally would KeyError.
        input_names = tuple(
            name for name in ("exposures", "inputCatalogs", "backgrounds")
            if name in inputs
        )
        inputs_by_data_id = []
        for name in input_names:
            inputs_by_data_id.append(
                {ref.dataId: ref
                 for ref in inputs[name][1]})
        # Intersection looks messy because dict_keys views only support
        # set operators like "&", not an "intersection" method.
        data_ids_to_keep = functools.reduce(operator.__and__,
                                            (d.keys()
                                             for d in inputs_by_data_id))
        # Pull out just the DatasetRefs that are in common in the inputs
        # and order them consistently (note that consistent ordering is not
        # automatic).
        adjusted_inputs = {}
        for name, refs in zip(input_names, inputs_by_data_id):
            adjusted_inputs[name] = (
                inputs[name][0],
                [refs[data_id] for data_id in data_ids_to_keep],
            )
            # Also update the full dict of inputs, so we can pass it to
            # super() later.
            inputs[name] = adjusted_inputs[name]
        # Do the same for the outputs.
        outputs_by_data_id = {
            ref.dataId: ref
            for ref in outputs["outputCatalogs"][1]
        }
        adjusted_outputs = {
            "outputCatalogs": (
                outputs["outputCatalogs"][0],
                [outputs_by_data_id[data_id] for data_id in data_ids_to_keep],
            )
        }
        outputs["outputCatalogs"] = adjusted_outputs["outputCatalogs"]
        # Delegate to super(); ignore results because they are guaranteed
        # to be empty.
        super().adjustQuantum(inputs, outputs, label, data_id)
        return adjusted_inputs, adjusted_outputs