Example #1
def run(
        rerun,  # Rerun name
        frame1,  # Frame number for input
        frame2,  # Frame number for template
        diff,  # Difference identifier for output
        patch,  # Sky patch identifier
        config,  # Configuration
):
    io = pipReadWrite.ReadWrite(suprimecam.SuprimecamMapper(rerun=rerun),
                                ['visit'],
                                config=config)
    diffProc = pipDiff.Diff(config=config)

    exp1 = io.inButler.get('warp', {'visit': frame1, 'patch': patch})
    exp2 = io.inButler.get('warp', {'visit': frame2, 'patch': patch})

    diffExp, sources, psf, apcorr, brightSources = diffProc.run(exp1, exp2)

    sources = afwDet.PersistableSourceVector(sources)

    diffProc.write(io.outButler, {
        'diff': diff,
        'patch': patch,
        'filter': diffExp.getFilter().getName()
    }, {
        "diff": diffExp,
        "diffpsf": psf,
        "diffsources": sources
    })
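Every butler call in run() above pairs a dataset type with a data-identifier dict. A minimal sketch of that addressing pattern, with placeholder visit, patch and filter values (none are taken from the source):

# Placeholder data identifiers mirroring the keys used in run() above.
frame1, frame2, diff, patch = 1000, 1002, 1, 0  # hypothetical values

getId1 = {'visit': frame1, 'patch': patch}  # selects the warp for frame1
getId2 = {'visit': frame2, 'patch': patch}  # selects the warp for frame2

# diffProc.write() pairs an output identifier with a dict of products whose
# keys ('diff', 'diffpsf', 'diffsources') name the dataset types to persist.
putId = {'diff': diff, 'patch': patch, 'filter': 'r'}  # 'r' is a placeholder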
Example #2
class SourceMeasurementStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps the measurement of sources on an exposure.
        The exposures to measure on should be in the clipboard along with the
        FootprintSet(s) to measure on those exposures. The keys for the
        exposures, and the FootprintSet(s) can be specified in the 
        policy file. If not specified, default keys will be used
    """
    def setup(self):
        self.log = Log(self.log, "SourceMeasurementStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile("meas_pipeline", 
            "SourceMeasurementStageDictionary.paf", "policy")
        defPolicy = pexPolicy.Policy.createPolicy(policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = defPolicy
        else:
            self.policy.mergeDefaults(defPolicy.getDictionary())
        
    def process(self, clipboard):
        """
        Measure sources in the worker process
        """
        self.log.log(Log.INFO, "Measuring Sources in process")
        
        # getClipboardData may raise; log the failure and re-raise so the
        # harness does not continue with undefined inputs
        try:
            measurePolicy, exposure, psf, positiveDetection, negativeDetection = \
                           self.getClipboardData(clipboard)
        except pexExcept.LsstException as e:
            self.log.log(Log.FATAL, str(e))
            raise
         
        #
        # Need to do something smart about merging positive and negative
        # detection sets.             
        #
        # For now, assume they are disjoint sets, so merge is trivial
        #
        footprintLists = []
        if positiveDetection:
            self.log.log(Log.DEBUG, "Positive FootprintSet found")
            isNegative = False
            footprintLists.append([positiveDetection.getFootprints(), isNegative])

        if negativeDetection:
            self.log.log(Log.DEBUG, "Negative FootprintSet found")
            isNegative = True
            footprintLists.append([negativeDetection.getFootprints(), isNegative])

        sourceSet = srcMeas.sourceMeasurement(exposure, psf, footprintLists, measurePolicy)
        
        # place SourceSet on the clipboard
        sourceKey = self.policy.get("outputKeys.sources")
        clipboard.put(sourceKey, sourceSet)
        clipboard.put(sourceKey + "_persistable", afwDet.PersistableSourceVector(sourceSet))
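The clipboard used by process() behaves as a keyed store shared between pipeline stages. A minimal stand-in, assuming only the put/get behaviour visible above (this is not the real harness class):

# Minimal stand-in for the clipboard above; the real harness Clipboard
# offers more than plain keyed storage.
class FakeClipboard(object):
    def __init__(self):
        self._data = {}

    def put(self, key, value):
        self._data[key] = value

    def get(self, key):
        return self._data[key]

# process() leaves two entries behind: the SourceSet under the configured
# key, and a persistable wrapper under the same key plus "_persistable".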
Example #3
class SourceMeasurementPsfFluxStageParallel(harnessStage.ParallelProcessing):
    """
    Description:
        This stage wraps the measurement of sources on an exposure.
        The exposures to measure on should be in the clipboard along with the
        FootprintSet(s) to measure on those exposures. The keys for the
        exposures, and the FootprintSet(s) can be specified in the 
        policy file. If not specified, default keys will be used
    """
    def setup(self):
        self.log = Log(self.log, "SourceMeasurementPsfFluxStage - parallel")

        policyFile = pexPolicy.DefaultPolicyFile(
            "meas_pipeline", "SourceMeasurementPsfFluxStageDictionary.paf",
            "policy")
        defPolicy = pexPolicy.Policy.createPolicy(
            policyFile, policyFile.getRepositoryPath(), True)

        if self.policy is None:
            self.policy = defPolicy
        else:
            self.policy.mergeDefaults(defPolicy.getDictionary())

    def process(self, clipboard):
        """
        Measure sources in the worker process
        """
        self.log.log(Log.INFO, "Measuring Sources' psfFluxes in process")

        # getClipboardData may raise; log the failure and re-raise so the
        # harness does not continue with undefined inputs
        try:
            exposure, sourceSet = self.getClipboardData(clipboard)
        except pexExcept.LsstException as e:
            self.log.log(Log.FATAL, str(e))
            raise

        srcMeas.sourceMeasurementPsfFlux(exposure, sourceSet)

        # Put the SourceSet back on the clipboard: the _persistable wrapper
        # must be stored even though a plain sourceSet entry already exists.
        sourceKey = self.policy.get("outputKeys.sourceSet")
        clipboard.put(sourceKey, sourceSet)
        clipboard.put(sourceKey + "_persistable",
                      afwDet.PersistableSourceVector(sourceSet))
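Both setup() methods above follow the same merge-with-defaults pattern: load the stage's policy dictionary, then let any user-supplied policy take precedence. A plain-dict illustration of that precedence (not the real pexPolicy API; the key name is hypothetical):

# Plain-dict sketch of mergeDefaults() precedence: user values win and
# dictionary defaults fill in whatever is missing.
def merge_defaults(user_policy, default_policy):
    merged = dict(default_policy)
    if user_policy:
        merged.update(user_policy)
    return merged

defaults = {'outputKeys.sources': 'sourceSet'}
merged = merge_defaults({'outputKeys.sources': 'mySources'}, defaults)
assert merged['outputKeys.sources'] == 'mySources'  # user value wins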
Example #4
    def write(self,
              dataId,
              exposure=None,
              psf=None,
              sources=None,
              brightSources=None,
              matches=None,
              matchMeta=None,
              **kwargs):
        """Write processed data.

        @param dataId Data identifier for butler
        @param exposure Exposure to write, or None
        @param psf PSF to write, or None
        @param sources Sources to write, or None
        @param matches Matches to write, or None
        """
        if exposure is not None:
            self.log.log(self.log.INFO, "Writing exposure: %s" % (dataId))
            self.outButler.put(exposure, 'calexp', dataId)
        if psf is not None:
            self.log.log(self.log.INFO, "Writing PSF: %s" % (dataId))
            self.outButler.put(psf, 'psf', dataId)
        if sources is not None:
            self.log.log(self.log.INFO, "Writing sources: %s" % (dataId))
            self.outButler.put(afwDet.PersistableSourceVector(sources), 'src',
                               dataId)
        if matches is not None:
            try:
                self.log.log(self.log.INFO, "Writing matches: %s" % (dataId))
                smv = afwDet.SourceMatchVector()
                for match in matches:
                    smv.push_back(match)
                self.outButler.put(
                    afwDet.PersistableSourceMatchVector(smv, matchMeta),
                    'icMatch', dataId)

                if brightSources is None:
                    brightSources = afwDet.SourceSet()
                    for match in matches:
                        brightSources.push_back(match.second)
            except Exception as e:
                self.log.log(self.log.WARN, "Unable to write matches: %s" % e)
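When no bright sources are supplied, write() falls back to the matched sources themselves. A plain-Python sketch of that fallback, with a stand-in match object:

# Sketch of the brightSources fallback above; Match stands in for an
# afwDet match, whose .second holds the matched source.
class Match(object):
    def __init__(self, ref, src):
        self.first, self.second = ref, src

matches = [Match('ref1', 'src1'), Match('ref2', 'src2')]  # fake matches
brightSources = None
if brightSources is None:
    brightSources = [m.second for m in matches]
assert brightSources == ['src1', 'src2']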
Example #5
    def process(self, clipboard):
        self.log.log(Log.INFO, "Making a persistable source vector in process")

        sourceSet = clipboard.get(self.policy.get("inputkeys.sourceSet"))
        clipboard.put(self.policy.get("outputKeys.persistable"),
                      afwDet.PersistableSourceVector(sourceSet))
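This stage only repackages an existing SourceSet so the persistence framework can write it. A sketch of the key flow, with hypothetical policy values and plain Python containers standing in for the clipboard and the persistable wrapper:

# Hypothetical policy values; the real defaults live in the stage's policy
# dictionary, which is not shown in the source.
policy = {'inputkeys.sourceSet': 'sourceSet',
          'outputKeys.persistable': 'persistable_sourceSet'}
clipboard = {'sourceSet': ['src1', 'src2']}          # fake SourceSet
clipboard[policy['outputKeys.persistable']] = tuple(
    clipboard[policy['inputkeys.sourceSet']])        # wrapper stand-in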
Example #6
class ReadWrite(object):
    """ReadWrite provides I/O for pipette (LSST algorithms testing)"""
    def __init__(
            self,  # ReadWrite
            mappers,  # Mapper or mapper class to use
            ccdKeys,  # Data keywords required to specify a CCD
            fileKeys=None,  # Data keywords required to specify a file
            config=None,  # Configuration
    ):
        """Initialisation

        @param mapper Data mapper (class or instance) for persistence
        @param config Configuration (for instantiating mapper)
        """

        # if we got a list, it contains [inMapper, outMapper]
        if isinstance(mappers, list) and len(mappers) == 2:
            inMapper, outMapper = mappers
        # if we got a mapper, use it for both input and output
        elif (isinstance(mappers, dafPersist.Mapper)
              or issubclass(mappers, dafPersist.Mapper)):
            inMapper, outMapper = mappers, mappers
        # punt
        else:
            raise RuntimeError(
                "'mappers' must be a dafPersist.Mapper (or derived from it), "
                "or a list containing two of them (input and output).")

        self.log = pexLog.Log(pexLog.getDefaultLog(), "ReadWrite")

        self.inMapper = initMapper(inMapper, config, self.log, inMap=True)
        self.ibf = dafPersist.ButlerFactory(mapper=self.inMapper)
        self.inButler = self.ibf.create()

        self.outMapper = initMapper(outMapper, config, self.log, inMap=False)
        self.obf = dafPersist.ButlerFactory(mapper=self.outMapper)
        self.outButler = self.obf.create()

        self.ccdKeys = ccdKeys
        if fileKeys is None:
            fileKeys = list(ccdKeys)
        if isinstance(fileKeys, basestring):
            fileKeys = [fileKeys]
        self.fileKeys = fileKeys
        return

    def lookup(self, dataId):
        """Lookup data for a CCD.

        @param dataId Data identifier for butler
        @returns Complete data identifiers
        """
        for key in self.ccdKeys:
            if key not in dataId:
                raise KeyError("Data identifier does not contain keyword %s" %
                               key)
        keys = self.inButler.queryMetadata('raw',
                                           self.ccdKeys,
                                           format=self.fileKeys,
                                           dataId=dataId)

        identifiers = list()
        for key in keys:
            ident = dict()
            if not isinstance(key, basestring) and hasattr(key, "__getitem__"):
                for index, name in enumerate(self.fileKeys):
                    ident[name] = key[index]
            else:
                assert (len(self.fileKeys) == 1)
                ident[self.fileKeys[0]] = key
            identifiers.append(ident)
        return identifiers

    @timecall
    def readRaw(self, dataId):
        """Read raw data of a CCD.

        @param dataId Data identifier for butler
        @returns Raw exposures
        """
        self.log.log(self.log.INFO, "Looking for: %s" % (dataId))
        identifiers = self.lookup(dataId)
        if not identifiers:
            raise RuntimeError("No raw data found for dataId %s" % (dataId))

        exposures = list()
        for ident in identifiers:
            ident.update(dataId)
            if not self.inButler.datasetExists('raw', ident):
                raise RuntimeError("Raw data does not exist for %s" % ident)
            self.log.log(self.log.DEBUG, "Reading: %s" % (ident))
            exp = self.inButler.get('raw', ident)
            if isinstance(exp, afwImage.ExposureU):
                exp = exp.convertF()
            exposures.append(exp)
        return exposures

    def readMatches(self, dataId, ignore=False):
        """Read matches, sources and catalogue; combine.

        @param dataId Data identifier for butler
        @param ignore Ignore non-existent data?
        @returns Matches
        """
        sources = self.read('icSrc', dataId, ignore=ignore)
        matches = self.read('icMatch', dataId, ignore=ignore)
        headers = self.read('calexp_md', dataId, ignore=ignore)

        output = []
        for sourceList, matchList, header in zip(sources, matches, headers):
            wcs = afwImage.makeWcs(header)
            width, height = header.get('NAXIS1'), header.get('NAXIS2')

            matchData = measAstrom.generateMatchesFromMatchList(
                matchList,
                sourceList.getSources(),
                wcs,
                width,
                height,
                log=self.log)

            output.append(matchData)
        return output

    @timecall
    def read(self, which, dataId, ignore=False):
        """Read some data.

        @param which Type of data to read
        @param dataId Data identifier for butler
        @returns Raw exposures
        """
        identifiers = self.lookup(dataId)
        data = list()
        for ident in identifiers:
            ident.update(dataId)
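            # Try the input butler first, then fall back to the output
            # butler; raise only if the last butler also lacks the data
            # (unless ignore is set).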
            for i, butler in enumerate([self.inButler, self.outButler]):
                if not butler.datasetExists(which, ident):
                    if i == 1:
                        if not ignore:
                            raise RuntimeError(
                                "Data type %s does not exist for %s" %
                                (which, ident))
                else:
                    self.log.log(self.log.DEBUG,
                                 "Reading %s: %s" % (which, ident))
                    data.append(butler.get(which, ident))
                    break
        return data

    @timecall
    def detrends(self, dataId, config):
        """Read detrends for a CCD.

        @param dataId Data identifier for butler
        @param config Configuration (for which detrends to read)
        @returns Dict of lists for each detrend type
        """
        identifiers = self.lookup(dataId)
        detrends = dict()
        do = config['do']['isr']
        if not do['enabled']:
            return detrends

        for kind in ('bias', 'dark', 'flat'):
            if do[kind]:
                detList = list()
                for ident in identifiers:
                    ident.update(dataId)
                    if not self.inButler.datasetExists(kind, ident):
                        raise RuntimeError(
                            "Data type %s does not exist for %s" %
                            (kind, ident))
                    self.log.log(self.log.DEBUG,
                                 "Reading %s for %s" % (kind, ident))
                    detrend = self.inButler.get(kind, ident)
                    detList.append(detrend)
                detrends[kind] = detList
        # Fringe depends on the filter
        if do['fringe'] and 'filters' in config['fringe']:
            fringeList = list()
            for ident in identifiers:
                ident.update(dataId)
                filterList = self.inButler.queryMetadata(
                    "raw", None, "filter", ident)
                assert len(filterList) == 1, \
                    "Filter query is non-unique: %s" % filterList
                filtName = filterList[0]
                if filtName in config['fringe']['filters']:
                    if not self.inButler.datasetExists('fringe', ident):
                        raise RuntimeError(
                            "Data type fringe does not exist for %s" % ident)
                    self.log.log(self.log.DEBUG,
                                 "Reading fringe for %s" % (ident))
                    fringe = self.inButler.get("fringe", ident)
                    fringeList.append(fringe)
            if len(fringeList) > 0:
                detrends['fringe'] = fringeList
        return detrends

    @timecall
    def write(self,
              dataId,
              exposure=None,
              psf=None,
              sources=None,
              brightSources=None,
              matches=None,
              matchMeta=None,
              **kwargs):
        """Write processed data.

        @param dataId Data identifier for butler
        @param exposure Exposure to write, or None
        @param psf PSF to write, or None
        @param sources Sources to write, or None
        @param matches Matches to write, or None
        """
        if exposure is not None:
            self.log.log(self.log.INFO, "Writing exposure: %s" % (dataId))
            self.outButler.put(exposure, 'calexp', dataId)
        if psf is not None:
            self.log.log(self.log.INFO, "Writing PSF: %s" % (dataId))
            self.outButler.put(psf, 'psf', dataId)
        if sources is not None:
            self.log.log(self.log.INFO, "Writing sources: %s" % (dataId))
            self.outButler.put(afwDet.PersistableSourceVector(sources), 'src',
                               dataId)
        if matches is not None:
            try:
                self.log.log(self.log.INFO, "Writing matches: %s" % (dataId))
                smv = afwDet.SourceMatchVector()
                for match in matches:
                    smv.push_back(match)
                self.outButler.put(
                    afwDet.PersistableSourceMatchVector(smv, matchMeta),
                    'icMatch', dataId)

                if brightSources is None:
                    brightSources = afwDet.SourceSet()
                    for match in matches:
                        brightSources.push_back(match.second)
            except Exception as e:
                self.log.log(self.log.WARN, "Unable to write matches: %s" % e)

        if brightSources is not None:
            self.log.log(self.log.INFO,
                         "Writing calibration sources: %s" % (dataId))
            self.outButler.put(afwDet.PersistableSourceVector(brightSources),
                               'icSrc', dataId)

        return
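A hypothetical use of ReadWrite, combining the constructor call shown in Example #1 with the read and write methods above; the mapper, config object and data-identifier values are all placeholders, and this only runs inside an LSST pipette environment:

# Hypothetical usage; suprimecam, config and the identifier values are
# placeholders, not values from the source.
io = ReadWrite(suprimecam.SuprimecamMapper(rerun='my-rerun'),
               ['visit', 'ccd'], config=config)
exposures = io.readRaw({'visit': 1000, 'ccd': 3})          # raw exposures
detrends = io.detrends({'visit': 1000, 'ccd': 3}, config)  # bias/dark/flat
matches = io.readMatches({'visit': 1000, 'ccd': 3}, ignore=True)
io.write({'visit': 1000, 'ccd': 3}, exposure=exposures[0])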