Example #1
File: color.py  Project: rnikutta/afw
    def testPhotom(self):
        """Test the zero-point information"""
        
        flux, fluxErr = 1000.0, 10.0
        flux0, flux0Err = 1e12, 1e10
        self.calib.setFluxMag0(flux0)

        self.assertEqual(flux0, self.calib.getFluxMag0()[0])
        self.assertEqual(0.0, self.calib.getFluxMag0()[1])
        self.assertEqual(22.5, self.calib.getMagnitude(flux))
        # Error just in flux
        self.assertAlmostEqual(self.calib.getMagnitude(flux, fluxErr)[1], 2.5/math.log(10)*fluxErr/flux)
        # Error just in flux0
        self.calib.setFluxMag0(flux0, flux0Err)
        self.assertEqual(flux0Err, self.calib.getFluxMag0()[1])
        self.assertAlmostEqual(self.calib.getMagnitude(flux, 0)[1], 2.5/math.log(10)*flux0Err/flux0)

        self.assertAlmostEqual(flux0, self.calib.getFlux(0))
        self.assertAlmostEqual(flux, self.calib.getFlux(22.5))

        # I don't know how to test round-trip if fluxMag0 is significant compared to fluxErr
        self.calib.setFluxMag0(flux0, flux0 / 1e6)
        for fluxErr in (flux / 1e2, flux / 1e4):
            mag, magErr = self.calib.getMagnitude(flux, fluxErr)
            self.assertAlmostEqual(flux, self.calib.getFlux(mag, magErr)[0])
            self.assertTrue(abs(fluxErr - self.calib.getFlux(mag, magErr)[1]) < 1.0e-4)

        # Test context manager; shouldn't raise an exception within the block, should outside
        with imageUtils.CalibNoThrow():
            self.assertTrue(numpy.isnan(self.calib.getMagnitude(-50.0)))
        self.assertRaises(pexExcept.DomainError, self.calib.getMagnitude, -50.0)
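
The assertions above rest on the usual zero-point relation mag = -2.5 * log10(flux / fluxMag0) and its first-order error propagation. A minimal standalone sketch of that arithmetic (plain Python, not the afw API; the numbers mirror the test values) might look like this:

import math

# Illustrative only: mirrors the values used in testPhotom above.
flux, fluxErr = 1000.0, 10.0
flux0 = 1e12

mag = -2.5 * math.log10(flux / flux0)          # -2.5 * (-9) == 22.5
magErr = 2.5 / math.log(10) * fluxErr / flux   # ~0.0109 mag for a 1% flux error

print(mag, magErr)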
def extract_photometry(butler,
                       dataId,
                       forced_dataset,
                       filt,
                       object_id,
                       names_to_copy,
                       phot_type='base_PsfFlux'):
    # Can grab filter, mjd from 'calexp_md' call on visit
    md = butler.get('calexp_md', dataId=dataId, immediate=True)
    mjd = md.get('MJD-OBS')
    #        filt = md.get('FILTER')  # But that's not being set correctly right now, so we keep using the `filt` argument

    this_measurement = butler.get(forced_dataset, dataId).asAstropy()
    source_row, = np.where(this_measurement['objectId'] == object_id)
    if len(source_row) != 1:
        return None

    # 'this_measurement' is a table, but we only keep the single row matching object_id from each column
    new_row = {n: this_measurement[n][source_row] for n in names_to_copy}
    #        new_row['filter'] = dataId['filter']
    new_row['filter'] = filt
    new_row['mjd'] = mjd

    # Calibrate to magnitudes
    # The calibration information for the calexp
    # should still apply to the difference image
    calib = afwImage.Calib(md)
    with afwImageUtils.CalibNoThrow():
        new_row['%s_mag' % phot_type], new_row['%s_magSigma' % phot_type] = \
            calib.getMagnitude(new_row['%s_flux' % phot_type],
                               new_row['%s_fluxSigma' % phot_type])
    flux_mag_0, flux_magSigma_0 = calib.getFluxMag0()
    flux_mag_25 = 10**(-0.4 * 25) * flux_mag_0
    flux_norm = 1 / flux_mag_25
    new_row['%s_flux_zp25' % phot_type] = \
        flux_norm * new_row['%s_flux' % phot_type]
    new_row['%s_fluxSigma_zp25' % phot_type] = \
        flux_norm * new_row['%s_fluxSigma' % phot_type]
    return new_row
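
The zeropoint-25 normalization above (and again in assemble_catalogs_into_lightcurve below) rescales fluxes so that a magnitude-25 source has unit flux, which keeps negative difference-image fluxes usable in a lightcurve. A minimal sketch of the arithmetic, with an invented fluxMag0 standing in for calib.getFluxMag0()[0]:

# Illustrative numbers only; flux_mag_0 would come from calib.getFluxMag0().
flux_mag_0 = 1e12                              # flux of a magnitude-0 source
flux_mag_25 = 10**(-0.4 * 25) * flux_mag_0     # flux of a magnitude-25 source
flux_norm = 1.0 / flux_mag_25

flux = 1000.0
flux_zp25 = flux_norm * flux                   # 10.0 for these numbers
# Check: -2.5 * log10(flux_zp25) + 25 == 22.5, the source magnitude.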
Example #3
    def _loadAndMatchCatalogs(self, repo, dataIds, matchRadius):
        """Load data from specific visit. Match with reference.

        Parameters
        ----------
        repo : string
            The repository.  This is generally the directory on disk
            that contains the repository and mapper.
        dataIds : list of dict
            List of `butler` data IDs of image catalogs to compare to
            reference. The `calexp` pixel image is needed for the photometric
            calibration.
        matchRadius : afwGeom.Angle
            Radius for matching; the caller typically passes 1 arcsecond.

        Returns
        -------
        afw.table.GroupView
            The matched sources, grouped by object ID.
        """
        # Following
        # https://github.com/lsst/afw/blob/tickets/DM-3896/examples/repeatability.ipynb
        butler = dafPersist.Butler(repo)
        dataset = 'src'

        # 2016-02-08 MWV:
        # I feel like I could be doing something more efficient with
        # something along the lines of the following:
        #    dataRefs = [dafPersist.ButlerDataRef(butler, vId) for vId in dataIds]

        ccdKeyName = getCcdKeyName(dataIds[0])

        schema = butler.get(dataset + "_schema", immediate=True).schema
        mapper = SchemaMapper(schema)
        mapper.addMinimalSchema(schema)
        mapper.addOutputField(Field[float]('base_PsfFlux_snr', 'PSF flux SNR'))
        mapper.addOutputField(Field[float]('base_PsfFlux_mag',
                                           'PSF magnitude'))
        mapper.addOutputField(Field[float]('base_PsfFlux_magerr',
                                           'PSF magnitude uncertainty'))
        newSchema = mapper.getOutputSchema()

        # Create an object that matches multiple catalogs with the same schema
        mmatch = MultiMatch(newSchema,
                            dataIdFormat={
                                'visit': np.int32,
                                ccdKeyName: np.int32
                            },
                            radius=matchRadius,
                            RecordClass=SimpleRecord)

        # Create the new extended source catalog
        srcVis = SourceCatalog(newSchema)

        for vId in dataIds:
            try:
                calexpMetadata = butler.get("calexp_md", vId, immediate=True)
            except (FitsError, dafPersist.NoResults) as e:
                print(e)
                print("Could not open calibrated image file for ", vId)
                print("Skipping %s " % repr(vId))
                continue
            except TypeError as te:
                # DECam images that haven't been properly reformatted
                # can trigger a TypeError because of a residual FITS header
                # LTV2 which is a float instead of the expected integer.
                # This generates an error of the form:
                #
                # lsst::pex::exceptions::TypeError: 'LTV2 has mismatched type'
                #
                # See, e.g., DM-2957 for details.
                print(te)
                print("Calibration image header information malformed.")
                print("Skipping %s " % repr(vId))
                continue

            calib = afwImage.Calib(calexpMetadata)

            oldSrc = butler.get('src', vId, immediate=True)
            print(
                len(oldSrc), "sources in ccd %s  visit %s" %
                (vId[ccdKeyName], vId["visit"]))

            # create temporary catalog
            tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
            tmpCat.extend(oldSrc, mapper=mapper)
            tmpCat['base_PsfFlux_snr'][:] = tmpCat['base_PsfFlux_flux'] \
                / tmpCat['base_PsfFlux_fluxSigma']
            with afwImageUtils.CalibNoThrow():
                mag, magErr = calib.getMagnitude(tmpCat['base_PsfFlux_flux'],
                                                 tmpCat['base_PsfFlux_fluxSigma'])
                tmpCat['base_PsfFlux_mag'][:] = mag
                tmpCat['base_PsfFlux_magerr'][:] = magErr

            srcVis.extend(tmpCat, False)
            mmatch.add(catalog=tmpCat, dataId=vId)

        # Complete the match, returning a catalog that includes
        # all matched sources with object IDs that can be used to group them.
        matchCat = mmatch.finish()

        # Create a mapping object that allows the matches to be manipulated
        # as a mapping of object ID to catalog of sources.
        allMatches = GroupView.build(matchCat)

        return allMatches
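
The GroupView returned here behaves as a mapping from object ID to the per-visit sources matched to that object. A hedged sketch of downstream use, in the style of validate_drp (the aggregate call and field name here are assumptions, not part of this example):

import numpy as np

# Hypothetical: `allMatches` is the GroupView returned by _loadAndMatchCatalogs.
magKey = allMatches.schema.find('base_PsfFlux_mag').key

# aggregate() reduces one field within each matched group, here giving the
# per-object mean PSF magnitude and its scatter across visits.
meanMag = allMatches.aggregate(np.mean, field=magKey)
rmsMag = allMatches.aggregate(np.std, field=magKey)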
Example #4
    def _loadAndMatchCatalogs(self,
                              repo,
                              dataIds,
                              matchRadius,
                              useJointCal=False):
        """Load data from specific visit. Match with reference.

        Parameters
        ----------
        repo : string or Butler
            A Butler or a repository URL that can be used to construct one
        dataIds : list of dict
            List of `butler` data IDs of image catalogs to compare to
            reference. The `calexp` pixel image is needed for the photometric
            calibration.
        matchRadius : afwGeom.Angle
            Radius for matching; the caller typically passes 1 arcsecond.
        useJointCal : bool, optional
            If True, use the jointcal `photoCalib` and updated WCS instead of
            the single-frame `calexp` calibration.

        Returns
        -------
        afw.table.GroupView
            The matched sources, grouped by object ID.
        """
        # Following
        # https://github.com/lsst/afw/blob/tickets/DM-3896/examples/repeatability.ipynb
        if isinstance(repo, dafPersist.Butler):
            butler = repo
        else:
            butler = dafPersist.Butler(repo)
        dataset = 'src'

        # 2016-02-08 MWV:
        # I feel like I could be doing something more efficient with
        # something along the lines of the following:
        #    dataRefs = [dafPersist.ButlerDataRef(butler, vId) for vId in dataIds]

        ccdKeyName = getCcdKeyName(dataIds[0])

        schema = butler.get(dataset + "_schema").schema
        mapper = SchemaMapper(schema)
        mapper.addMinimalSchema(schema)
        mapper.addOutputField(Field[float]('base_PsfFlux_snr', 'PSF flux SNR'))
        mapper.addOutputField(Field[float]('base_PsfFlux_mag',
                                           'PSF magnitude'))
        mapper.addOutputField(Field[float]('base_PsfFlux_magErr',
                                           'PSF magnitude uncertainty'))
        newSchema = mapper.getOutputSchema()
        newSchema.setAliasMap(schema.getAliasMap())

        # Create an object that matches multiple catalogs with the same schema
        mmatch = MultiMatch(newSchema,
                            dataIdFormat={
                                'visit': np.int32,
                                ccdKeyName: np.int32
                            },
                            radius=matchRadius,
                            RecordClass=SimpleRecord)

        # Create the new extended source catalog
        srcVis = SourceCatalog(newSchema)

        for vId in dataIds:

            if useJointCal:
                try:
                    photoCalib = butler.get("photoCalib", vId)
                except (FitsError, dafPersist.NoResults) as e:
                    print(e)
                    print("Could not open photometric calibration for ", vId)
                    print("Skipping %s " % repr(vId))
                    continue
                try:
                    md = butler.get("wcs_md", vId)
                    wcs = afwImage.makeWcs(md)
                except (FitsError, dafPersist.NoResults) as e:
                    print(e)
                    print("Could not open updated WCS for ", vId)
                    print("Skipping %s " % repr(vId))
                    continue
            else:
                try:
                    calexpMetadata = butler.get("calexp_md", vId)
                except (FitsError, dafPersist.NoResults) as e:
                    print(e)
                    print("Could not open calibrated image file for ", vId)
                    print("Skipping %s " % repr(vId))
                    continue
                except TypeError as te:
                    # DECam images that haven't been properly reformatted
                    # can trigger a TypeError because of a residual FITS header
                    # LTV2 which is a float instead of the expected integer.
                    # This generates an error of the form:
                    #
                    # lsst::pex::exceptions::TypeError: 'LTV2 has mismatched type'
                    #
                    # See, e.g., DM-2957 for details.
                    print(te)
                    print("Calibration image header information malformed.")
                    print("Skipping %s " % repr(vId))
                    continue

                calib = afwImage.Calib(calexpMetadata)

            # We don't want to put this above the first `if useJointCal` block
            # because we need the first `butler.get` above to quickly
            # catch data IDs with no usable outputs.
            try:
                # HSC supports these flags, which dramatically improve I/O
                # performance; support for other cameras is DM-6927.
                oldSrc = butler.get('src', vId, flags=SOURCE_IO_NO_FOOTPRINTS)
            except Exception:
                oldSrc = butler.get('src', vId)
            print(
                len(oldSrc), "sources in ccd %s  visit %s" %
                (vId[ccdKeyName], vId["visit"]))

            # create temporary catalog
            tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
            tmpCat.extend(oldSrc, mapper=mapper)
            tmpCat['base_PsfFlux_snr'][:] = tmpCat['base_PsfFlux_flux'] \
                / tmpCat['base_PsfFlux_fluxSigma']

            if useJointCal:
                for record in tmpCat:
                    record.updateCoord(wcs)
                photoCalib.instFluxToMagnitude(tmpCat, "base_PsfFlux",
                                               "base_PsfFlux")
            else:
                with afwImageUtils.CalibNoThrow():
                    mag, magErr = calib.getMagnitude(tmpCat['base_PsfFlux_flux'],
                                                     tmpCat['base_PsfFlux_fluxSigma'])
                    tmpCat['base_PsfFlux_mag'][:] = mag
                    tmpCat['base_PsfFlux_magErr'][:] = magErr

            srcVis.extend(tmpCat, False)
            mmatch.add(catalog=tmpCat, dataId=vId)

        # Complete the match, returning a catalog that includes
        # all matched sources with object IDs that can be used to group them.
        matchCat = mmatch.finish()

        # Create a mapping object that allows the matches to be manipulated
        # as a mapping of object ID to catalog of sources.
        allMatches = GroupView.build(matchCat)

        return allMatches
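
For context, a hypothetical set of inputs for this method; the visit numbers and ccd key are invented for illustration and only mirror the vId['visit'] / ccdKeyName usage above:

import lsst.afw.geom as afwGeom

dataIds = [
    {'visit': 123456, 'ccd': 42},
    {'visit': 123457, 'ccd': 42},
]
matchRadius = 1 * afwGeom.arcseconds  # an afwGeom.Angle of 1 arcsecond
# allMatches = <enclosing object>._loadAndMatchCatalogs(repo, dataIds, matchRadius,
#                                                       useJointCal=False)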
def assemble_catalogs_into_lightcurve(dataIds_by_filter,
                                      repo_dir,
                                      source_row=0,
                                      dataset='calexp',
                                      debug=False):
    """Return Table with measurements."""
    butler = dafPersist.Butler(repo_dir)

    names_to_copy = [
        'objectId', 'coord_ra', 'coord_dec', 'parentObjectId',
        'base_RaDecCentroid_x', 'base_RaDecCentroid_y', 'base_PsfFlux_flux',
        'base_PsfFlux_fluxSigma'
    ]
    # flux_zp25 is flux normalized to a zeropoint of 25.
    # This convention is useful and appropriate for transient sources
    # that are expected to be negative as well as positive
    # for a given lightcurve.
    names_to_generate = [
        'filter', 'mjd', 'base_PsfFlux_mag', 'base_PsfFlux_magSigma',
        'base_PsfFlux_flux_zp25', 'base_PsfFlux_fluxSigma_zp25'
    ]
    names = names_to_generate + names_to_copy
    dtype = (str, float, float, float, float, float, int, float, float, int,
             float, float, float, float)
    table = Table(names=names, dtype=dtype)

    if dataset == 'deepDiff_differenceExp':
        prefix = 'deepDiff_'
    else:
        prefix = ''
    forced_dataset = prefix + 'forcedRaDec_src'
    if debug:
        print("FORCED_DATASET: ", forced_dataset)

    for f, dataIds in dataIds_by_filter.items():
        for dataId in dataIds:
            # Can grab filter, mjd from 'calexp_md' call on visit
            md = butler.get('calexp_md', dataId=dataId, immediate=True)
            mjd = md.get('MJD-OBS')
            #        filt = md.get('FILTER')  # But that's not being set right now so we'll keep using f

            this_measurement = butler.get(forced_dataset, dataId)
            # 'this_measurement' is a table, but we're only extracting the first entry from each column
            cols_for_new_row = {
                n: this_measurement[n][source_row]
                for n in names_to_copy
            }
            #        cols_for_new_row['filter'] = dataId['filter']
            cols_for_new_row['filter'] = f
            cols_for_new_row['mjd'] = mjd

            # Calibrate to magnitudes
            # The calibration information for the calexp
            # should still apply to the difference image
            calib = afwImage.Calib(md)
            with afwImageUtils.CalibNoThrow():
                cols_for_new_row['base_PsfFlux_mag'], cols_for_new_row['base_PsfFlux_magSigma'] = \
                    calib.getMagnitude(cols_for_new_row['base_PsfFlux_flux'],
                                       cols_for_new_row['base_PsfFlux_fluxSigma'])
            flux_mag_0, flux_magSigma_0 = calib.getFluxMag0()
            flux_mag_25 = 10**(-0.4 * 25) * flux_mag_0
            flux_norm = 1 / flux_mag_25
            cols_for_new_row['base_PsfFlux_flux_zp25'] = \
                flux_norm * cols_for_new_row['base_PsfFlux_flux']
            cols_for_new_row['base_PsfFlux_fluxSigma_zp25'] = \
                flux_norm * cols_for_new_row['base_PsfFlux_fluxSigma']

            table.add_row(cols_for_new_row)

    return table
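
A hypothetical call of the function above; the repository path, visits, ccd, and filters are invented for illustration:

dataIds_by_filter = {
    'g': [{'visit': 123456, 'ccd': 42, 'filter': 'g'}],
    'r': [{'visit': 123460, 'ccd': 42, 'filter': 'r'}],
}
lightcurve = assemble_catalogs_into_lightcurve(dataIds_by_filter,
                                               '/path/to/repo',
                                               dataset='deepDiff_differenceExp')
lightcurve.sort('mjd')
print(lightcurve['mjd', 'filter', 'base_PsfFlux_mag', 'base_PsfFlux_magSigma'])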