Code example #1
def run(rerun,                          # Rerun name
        frames,                          # Frame numbers
        ccds,                           # CCD numbers
        stack,                          # Stack identifier
        patch,                          # Patch identifier
        filter,                         # Filter name
        config,                         # Configuration
        coords,                         # Skycell centre coordinates
        scale,                          # Pixel scale
        sizes,                          # Skycell size
        ignore=False,                   # Ignore missing files?
        ):
    io = pipReadWrite.ReadWrite(hsc.HscSimMapper(rerun=rerun), ['visit', 'ccd'], config=config)
    roots = config['roots']
    basename = os.path.join(roots['output'], rerun)
    stackProc = pipStack.Stack(config=config)

    # Build one list of data identifiers per frame; each entry pairs a visit with a CCD
    identMatrix = list()
    for frame in frames:
        identList = list()
        for ccd in ccds:
            dataId = {'visit': frame, 'ccd': ccd}
            identList.append(dataId)
        identMatrix.append(identList)

    exp = stackProc.run(identMatrix, io.inButler,
                        coords[0], coords[1], scale, sizes[0], sizes[1], ignore=ignore)

    #stack.writeFits(basename + ".fits")
    stackProc.write(io.outButler, {'stack': stack, 'patch': patch, 'filter': filter}, {"stack": exp})
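A minimal invocation sketch for the run() driver above, with made-up values; the only firm requirements visible in the snippet are that coords and sizes are two-element sequences and that config carries a 'roots' entry with an 'output' path. Everything else (paths, visit and CCD numbers, filter name) is illustrative.

config = {'roots': {'output': '/data/hsc/output'}}   # hypothetical config; only the key this snippet reads

run('my_rerun',                  # rerun name
    [1000, 1002, 1004],          # frames (visits) to stack
    range(10),                   # CCD numbers
    0,                           # stack identifier
    5,                           # patch identifier
    'HSC-I',                     # filter name
    config,
    (150.123456, 2.123456),      # skycell centre (RA, Dec), degrees
    0.168,                       # pixel scale, arcsec/pixel
    (4000, 4000))                # skycell size, pixels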
Code example #2
def run(
        rerun,  # Rerun name
        stack,  # Stack identifier
        filter,  # Filter name
        field,  # Field name
        scale,  # Scale, arcsec/pix
):
    io = pipReadWrite.ReadWrite(hsc.HscSimMapper(rerun=rerun),
                                ['visit', 'ccd'])

    skyPolicy = io.inButler.get('skypolicy')
    print skyPolicy.toString()
    sky = skypix.QuadSpherePixelization(
        skyPolicy.get('resolutionPix'),
        skyPolicy.get('paddingArcsec') / 3600.0)  # padding converted from arcsec to degrees

    dataId = {'filter': filter}
    if field is not None:
        dataId['field'] = field
    skytiles = io.inButler.queryMetadata('calexp', None, 'skyTile', dataId)
    for tile in skytiles:
        dataId['skyTile'] = tile
        visits = io.inButler.queryMetadata('calexp', None, 'visit', dataId)
        if len(visits) == 0:
            continue

        geom = sky.getGeometry(tile)
        bbox = geom.getBoundingBox()
        ra, dec = bbox.getCenter()  # Degrees
        theta = bbox.getThetaExtent()  # Width, degrees
        size = int(theta * 3600.0 / scale)  # Size, pixels

        cmd = "hsc_stack.py --rerun " + rerun
        cmd += " --stack %d" % stack
        cmd += " --patch %d" % tile
        cmd += " --filter %s" % filter
        cmd += " --coords %f %f" % (ra, dec)
        cmd += " --scale %f" % scale
        cmd += " --sizes %d %d" % (size, size)
        cmd += " --frames %s" % ":".join(map(str, visits))
        cmd += " --ignore"
        print cmd
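Each iteration of the loop prints one hsc_stack.py invocation per sky tile; with made-up values for the rerun, tile, and visits, the generated line looks like this (printed as a single line):

hsc_stack.py --rerun my_rerun --stack 0 --patch 123456 --filter HSC-I --coords 150.123456 2.123456 --scale 0.168000 --sizes 4000 4000 --frames 1000:1002:1004 --ignore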
Code example #3
def run(rerun,                          # Rerun name
        frame,                          # Frame number
        ccds,                           # CCD numbers
        patch,                          # Patch identifier
        config,                         # Configuration
        coords,                         # Skycell centre coordinates
        scale,                          # Pixel scale
        sizes,                          # Skycell size
        ):
    io = pipReadWrite.ReadWrite(hsc.HscSimMapper(rerun=rerun), ['visit', 'ccd'], config=config)
    roots = config['roots']
    basename = os.path.join(roots['output'], '%s-%d' % (rerun, frame))
    warpProc = pipWarp.Warp(config=config)

    # One data identifier per CCD of this frame
    identList = list()
    for ccd in ccds:
        dataId = {'visit': frame, 'ccd': ccd}
        identList.append(dataId)

    warp = warpProc.run(identList, io.inButler, coords[0], coords[1], scale, sizes[0], sizes[1])

    #warp.writeFits(basename + ".fits")
    warpProc.write(io.outButler, {'visit': frame, 'skytile': patch},  # assuming the patch identifier names the sky tile
                   {"warp": warp})
Code example #4
File: showPsfs.py  Project: nicolaschotard/obs_subaru
def main(dataDir,
         visit,
         title="",
         outputTxtFileName=None,
         showFwhm=False,
         minFwhm=None,
         maxFwhm=None,
         correctDistortion=False,
         showEllipticity=False,
         ellipticityDirection=False,
         showNdataFwhm=False,
         showNdataEll=False,
         minNdata=None,
         maxNdata=None,
         gridPoints=30,
         verbose=False):

    butler = dafPersist.ButlerFactory(mapper=hscSim.HscSimMapper(
        root=dataDir)).create()
    camera = butler.get("camera")

    if not (showFwhm or showEllipticity or showNdataFwhm or showNdataEll
            or outputTxtFileName):
        showFwhm = True
    #
    # Get a dict of cameraGeom::Ccd indexed by serial number
    #
    ccds = {}
    for raft in camera:
        for ccd in raft:
            ccd.setTrimmed(True)
            ccds[ccd.getId().getSerial()] = ccd
    #
    # Read all the tableSeeingMap files, converting their (x, y) to focal plane coordinates
    #
    xArr = []
    yArr = []
    ellArr = []
    fwhmArr = []
    paArr = []
    aArr = []
    bArr = []
    e1Arr = []
    e2Arr = []
    elle1e2Arr = []
    for tab in butler.subset("tableSeeingMap", visit=visit):
        # we could use tab.datasetExists() but it prints a rude message
        fileName = butler.get("tableSeeingMap_filename", **tab.dataId)[0]
        if not os.path.exists(fileName):
            continue

        with open(fileName) as fd:
            ccd = None
            for line in fd.readlines():
                if re.search(r"^\s*#", line):
                    continue
                fields = [float(_) for _ in line.split()]

                if ccd is None:
                    ccd = ccds[int(fields[0])]

                x, y, fwhm, ell, pa, a, b = fields[1:8]
                x, y = ccd.getPositionFromPixel(afwGeom.PointD(x, y)).getMm()
                xArr.append(x)
                yArr.append(y)
                ellArr.append(ell)
                fwhmArr.append(fwhm)
                paArr.append(pa)
                aArr.append(a)
                bArr.append(b)
                if len(fields) == 11:
                    e1 = fields[8]
                    e2 = fields[9]
                    elle1e2 = fields[10]
                else:
                    e1 = -9999.
                    e2 = -9999.
                    elle1e2 = -9999.
                e1Arr.append(e1)
                e2Arr.append(e2)
                elle1e2Arr.append(elle1e2)

    xArr = np.array(xArr)
    yArr = np.array(yArr)
    ellArr = np.array(ellArr)
    fwhmArr = np.array(fwhmArr) * 0.168  # pixels -> arcsec (HSC pixel scale: 0.168 arcsec/pixel)
    paArr = np.radians(np.array(paArr))
    aArr = np.array(aArr)
    bArr = np.array(bArr)

    e1Arr = np.array(e1Arr)
    e2Arr = np.array(e2Arr)
    elle1e2Arr = np.array(elle1e2Arr)

    if correctDistortion:
        import lsst.afw.geom.ellipses as afwEllipses

        dist = camera.getDistortion()
        for i in range(len(aArr)):
            axes = afwEllipses.Axes(aArr[i], bArr[i], paArr[i])
            if False:  # testing only!
                axes = afwEllipses.Axes(1.0, 1.0, np.arctan2(yArr[i], xArr[i]))
            quad = afwEllipses.Quadrupole(axes)
            quad = quad.transform(
                dist.computeQuadrupoleTransform(
                    afwGeom.PointD(xArr[i], yArr[i]), False))
            axes = afwEllipses.Axes(quad)
            aArr[i], bArr[i], paArr[i] = axes.getA(), axes.getB(), axes.getTheta()

        ellArr = 1 - bArr / aArr

    if len(xArr) == 0:
        gridPoints = 0
        xs, ys = [], []
    else:
        N = gridPoints * 1j
        extent = [min(xArr), max(xArr), min(yArr), max(yArr)]
        xs, ys = np.mgrid[extent[0]:extent[1]:N, extent[2]:extent[3]:N]

    title = [
        title,
    ]

    title.append("\n#")

    if outputTxtFileName:
        f = open(outputTxtFileName, 'w')
        f.write("# %s visit %s\n" % (" ".join(title), visit))
        for x, y, ell, fwhm, pa, a, b, e1, e2, elle1e2 in zip(
                xArr, yArr, ellArr, fwhmArr, paArr, aArr, bArr, e1Arr, e2Arr,
                elle1e2Arr):
            f.write('%f %f %f %f %f %f %f %f %f %f\n' %
                    (x, y, ell, fwhm, pa, a, b, e1, e2, elle1e2))

    if showFwhm:
        title.append("FWHM (arcsec)")
        if len(xs) > 0:
            fwhmResampled = griddata(xArr, yArr, fwhmArr, xs, ys)
            plt.imshow(fwhmResampled.T,
                       extent=extent,
                       vmin=minFwhm,
                       vmax=maxFwhm,
                       origin='lower')
            plt.colorbar()

        if outputTxtFileName:

            ndataGrids = getNumDataGrids(xArr, yArr, fwhmArr, xs, ys)

            f = open(outputTxtFileName + '-fwhm-grid.txt', 'w')
            f.write("# %s visit %s\n" % (" ".join(title), visit))
            for xline, yline, fwhmline, ndataline in zip(
                    xs.tolist(), ys.tolist(), fwhmResampled.tolist(),
                    ndataGrids):
                for xx, yy, fwhm, ndata in zip(xline, yline, fwhmline,
                                               ndataline):
                    if fwhm is None:
                        fwhm = -9999
                    f.write('%f %f %f %d\n' % (xx, yy, fwhm, ndata))

    elif showEllipticity:
        title.append("Ellipticity")
        scale = 4

        if ellipticityDirection:  # we don't care about the magnitude
            ellArr = 0.1

        u = -ellArr * np.cos(paArr)
        v = -ellArr * np.sin(paArr)
        if gridPoints > 0:
            u = griddata(xArr, yArr, u, xs, ys)
            v = griddata(xArr, yArr, v, xs, ys)
            x, y = xs, ys
        else:
            x, y = xArr, yArr

        Q = plt.quiver(
            x,
            y,
            u,
            v,
            scale=scale,
            pivot="middle",
            headwidth=0,
            headlength=0,
            headaxislength=0,
        )
        keyLen = 0.10
        if not ellipticityDirection:  # we care about the magnitude
            plt.quiverkey(Q, 0.20, 0.95, keyLen, "e=%g" % keyLen, labelpos='W')

        if outputTxtFileName:
            ndataGrids = getNumDataGrids(xArr, yArr, ellArr, xs, ys)

            f = open(outputTxtFileName + '-ell-grid.txt', 'w')
            f.write("# %s visit %s\n" % (" ".join(title), visit))
            #f.write('# %f %f %f %f %f %f %f\n' % (x, y, ell, fwhm, pa, a, b))
            for xline, yline, uline, vline, ndataline in zip(
                    x.tolist(), y.tolist(), u.tolist(), v.tolist(),
                    ndataGrids):
                for xx, yy, uu, vv, ndata in zip(xline, yline, uline, vline,
                                                 ndataline):
                    if uu is None:
                        uu = -9999
                    if vv is None:
                        vv = -9999
                    f.write('%f %f %f %f %d\n' % (xx, yy, uu, vv, ndata))

    elif showNdataFwhm:
        title.append("N per fwhm grid")
        if len(xs) > 0:
            ndataGrids = getNumDataGrids(xArr, yArr, fwhmArr, xs, ys)
            plt.imshow(ndataGrids,
                       interpolation='nearest',
                       extent=extent,
                       vmin=minNdata,
                       vmax=maxNdata,
                       origin='lower')
            plt.colorbar()
        else:
            pass

    elif showNdataEll:
        title.append("N per ell grid")
        if len(xs) > 0:
            ndataGrids = getNumDataGrids(xArr, yArr, ellArr, xs, ys)
            plt.imshow(ndataGrids,
                       interpolation='nearest',
                       extent=extent,
                       vmin=minNdata,
                       vmax=maxNdata,
                       origin='lower')
            plt.colorbar()
        else:
            pass

    #plt.plot(xArr, yArr, "r.")
    #plt.plot(xs, ys, "b.")
    plt.axes().set_aspect('equal')
    plt.axis([-20000, 20000, -20000, 20000])

    def frameInfoFrom(filepath):
        import pyfits
        with pyfits.open(filepath) as hdul:
            h = hdul[0].header
            # e.g. object=ABELL2163 filter=HSC-I exptime=360.0 alt=62.11143274 azm=202.32265181 hst=(23:40:08.363-23:40:48.546)
            return 'object=%s filter=%s exptime=%.1f azm=%.2f hst=%s' % (
                h['OBJECT'], h['FILTER01'], h['EXPTIME'], h['AZIMUTH'],
                h['HST'])

    title.insert(
        0,
        frameInfoFrom(
            butler.get('raw_filename', {
                'visit': visit,
                'ccd': 0
            })[0]))
    title.append(r'$\langle$FWHM$\rangle %4.2f$"' % np.median(fwhmArr))
    plt.title("%s visit=%s" % (" ".join(title), visit), fontsize=9)

    return plt
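main() returns the matplotlib.pyplot module, leaving it to the caller to display or save the figure. A minimal driver sketch, assuming a hypothetical repository path and visit number:

if __name__ == "__main__":
    plt = main("/data/hsc/SUPA",     # hypothetical data repository root
               visit=1000,           # hypothetical visit number
               title="seeing map",
               showFwhm=True)        # plot the interpolated FWHM map
    plt.savefig("fwhm-1000.png")     # or plt.show() for an interactive window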
Code example #5
def run(outName,
        rerun,
        frame1,
        frame2,
        config,
        matchTol=1.0,
        bright=None,
        ccd=None):
    io = pipReadWrite.ReadWrite(hscSim.HscSimMapper(rerun=rerun), ['visit'],
                                fileKeys=['visit', 'ccd'],
                                config=config)
    roots = config['roots']
    outName = os.path.join(roots['output'], '%s.pdf' %
                           outName) if outName is not None else None

    data1 = {'visit': frame1}
    data2 = {'visit': frame2}
    if ccd is not None:
        data1['ccd'] = ccd
        data2['ccd'] = ccd

    sources1 = io.read('src', data1, ignore=True)
    md1 = io.read('calexp_md', data1, ignore=True)
    sources2 = io.read('src', data2, ignore=True)
    md2 = io.read('calexp_md', data2, ignore=True)

    assert len(sources1) == len(md1)
    for i in range(len(sources1)):
        sources1[i] = filterSources(sources1[i], md1[i], bright)
    sources1 = concatenate(sources1)
    print len(sources1), "sources filtered from", frame1

    assert len(sources2) == len(md2)
    for i in range(len(sources2)):
        sources2[i] = filterSources(sources2[i], md2[i], bright)
    sources2 = concatenate(sources2)
    print len(sources2), "sources filtered from", frame2

    comp = pipCompare.Comparisons(sources1, sources2, matchTol=matchTol)
    print "%d matches" % comp.num

    ra = (comp['ra1'] + comp['ra2']) / 2.0
    dec = (comp['dec1'] + comp['dec2']) / 2.0

    if False:
        psfAvg = (comp['psf1'] + comp['psf2']) / 2.0
        psfDiff = comp['psf1'] - comp['psf2']
        apAvg = (comp['ap1'] + comp['ap2']) / 2.0
        apDiff = comp['ap1'] - comp['ap2']
        modelAvg = (comp['model1'] + comp['model2']) / 2.0
        modelDiff = comp['model1'] - comp['model2']
    else:
        psfAvg = (-2.5 * numpy.log10(comp['psf1']) -
                  2.5 * numpy.log10(comp['psf2'])) / 2.0
        psfDiff = -2.5 * numpy.log10(comp['psf1']) + 2.5 * numpy.log10(
            comp['psf2'])
        apAvg = (-2.5 * numpy.log10(comp['ap1']) -
                 2.5 * numpy.log10(comp['ap2'])) / 2.0
        apDiff = (-2.5 * numpy.log10(comp['ap1']) +
                  2.5 * numpy.log10(comp['ap2']))
        modelAvg = (-2.5 * numpy.log10(comp['model1']) -
                    2.5 * numpy.log10(comp['model2'])) / 2.0
        modelDiff = (-2.5 * numpy.log10(comp['model1']) +
                     2.5 * numpy.log10(comp['model2']))

    plot = plotter.Plotter(outName)
    plot.xy(ra, dec, title="Detections")
    plot.xy(psfAvg,
            psfDiff,
            axis=[comp['psf1'].min(), comp['psf1'].max(), -0.25, 0.25],
            title="PSF photometry")
    plot.xy(apAvg,
            apDiff,
            axis=[comp['ap1'].min(), comp['ap1'].max(), -0.25, 0.25],
            title="Aperture photometry")
    plot.histogram(psfDiff, [-0.25, 0.25], title="PSF photometry")
    plot.histogram(apDiff, [-0.25, 0.25], title="Aperture photometry")
    plot.histogram(modelDiff, [-0.25, 0.25], title="Model photometry")

    plot.xy(psfAvg, modelAvg - psfAvg, title="PSF vs Model")
    plot.histogram(modelAvg - psfAvg, [-0.6, 0.2],
                   bins=81,
                   title="PSF vs Model")

    plot.xy2(ra,
             comp['distance'],
             dec,
             comp['distance'],
             axis1=[ra.min(), ra.max(), 0, matchTol],
             axis2=[dec.min(), dec.max(), 0, matchTol],
             title1="Right ascension",
             title2="Declination")

    plot.quivers(ra,
                 dec,
                 comp['ra1'] - comp['ra2'],
                 comp['dec1'] - comp['dec2'],
                 title="Astrometry",
                 addUnitQuiver=1.0 / 3600.0)

    plot.close()
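The else branch converts the matched PSF, aperture, and model fluxes to magnitudes before averaging and differencing, using the standard m = -2.5 log10(flux) relation with no zero point applied. A minimal sketch of the same arithmetic with made-up flux values:

import numpy

flux1 = numpy.array([1000.0, 2500.0])    # hypothetical PSF fluxes from frame 1
flux2 = numpy.array([1010.0, 2450.0])    # matched fluxes from frame 2
mag1 = -2.5 * numpy.log10(flux1)
mag2 = -2.5 * numpy.log10(flux2)
psfAvg = (mag1 + mag2) / 2.0             # mean instrumental magnitude (x axis of the PSF plot)
psfDiff = mag1 - mag2                    # frame-to-frame magnitude difference (y axis)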
Code example #6
def run(
        rerun,  # Rerun name
        frame,  # Frame number
        ccd,  # CCD number
        config,  # Configuration
        log=pexLog.Log.getDefaultLog(),  # Log object
):

    # Make our own mappers for now
    mapperArgs = {'rerun': rerun}  # Arguments for mapper instantiation

    if config.has_key('roots'):
        roots = config['roots']
        for key, value in {
                'data': 'root',
                'calib': 'calibRoot',
                'output': 'outRoot'
        }.iteritems():
            if roots.has_key(key):
                mapperArgs[value] = roots[key]

    camera = config['camera']
    if camera.lower() in ("hsc",):
        mapper = obsHsc.HscSimMapper(**mapperArgs)
        ccdProc = pipCcd.ProcessCcd(config=config,
                                    Calibrate=CalibrateHscDc2,
                                    log=log)
    elif camera.lower() in ("suprimecam-mit", "sc-mit", "scmit",
                            "suprimecam-old", "sc-old", "scold"):
        mapper = obsSc.SuprimecamMapper(mit=True, **mapperArgs)
        ccdProc = ProcessCcdSuprimeCam(config=config, log=log)
    elif camera.lower() in ("suprimecam", "suprime-cam", "sc"):
        mapper = obsSc.SuprimecamMapper(**mapperArgs)
        ccdProc = ProcessCcdSuprimeCam(config=config, log=log)

    io = pipReadWrite.ReadWrite(mapper, ['visit', 'ccd'], config=config)

    oldUmask = os.umask(2)
    if oldUmask != 2:
        io.log.log(io.log.WARN, "pipette umask started as: %s" % oldUmask)

    dataId = {'visit': frame, 'ccd': ccd}
    raws = io.readRaw(dataId)
    detrends = io.detrends(dataId, config)

    if len([x for x in detrends if x]):  # We need to run at least part of the ISR
        raws = io.readRaw(dataId)
    else:
        io.fileKeys = ['visit', 'ccd']
        try:
            raws = io.read('calexp', dataId)
            config['do']['calibrate']['repair']['cosmicray'] = False
        except Exception:
            raws = io.readRaw(dataId)
        detrends = None

    exposure, psf, apcorr, brightSources, sources, matches, matchMeta = ccdProc.run(
        raws, detrends)
    io.write(dataId, exposure=None, psf=psf, sources=None)

    catPolicy = os.path.join(os.getenv("PIPETTE_DIR"), "policy", "catalog.paf")
    catalog = pipCatalog.Catalog(catPolicy, allowNonfinite=False)

    deferredState = DeferredHSCState(dataId, io, matches, matchMeta, sources,
                                     brightSources, exposure)
    return deferredState
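Several of these snippets expect the configuration to carry a 'roots' section: this last example maps 'data', 'calib', and 'output' onto the mapper's root, calibRoot, and outRoot arguments, and examples #1, #3, and #5 read roots['output'] to build output paths. A sketch of that layout, with placeholder paths:

config = {
    'camera': 'hsc',                     # selects HscSimMapper in the dispatch above
    'roots': {
        'data': '/data/hsc/raw',         # mapped to the mapper's `root`
        'calib': '/data/hsc/calib',      # mapped to `calibRoot`
        'output': '/data/hsc/output',    # mapped to `outRoot`; also used for output filenames
    },
}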