Example #1
0
 def setUp(self):
     """Create a Suprime-Cam butler from the TESTDATA_SUBARU_DIR test data."""
     datadir = os.getenv("TESTDATA_SUBARU_DIR")
     assert datadir, "testdata_subaru is not setup"
     mapper = SuprimecamMapper(root=os.path.join(datadir, "science"),
                               calibRoot=os.path.join(datadir, "calib"))
     self.bf = dafPersist.ButlerFactory(mapper=mapper)
     self.butler = self.bf.create()
Example #2
0
 def generateInputList(self):
     """Write a "ccdlist" input file for the pipeline.

     Lists one "raw" line per sensor that has a complete set of 32 channel
     files, up to self.options.ccdCount sensors, followed by one dummy
     entry per pipeline.  Incomplete sensors are reported on stderr and
     skipped.
     """
     with open("ccdlist", "w") as inputFile:
         # Header telling the consumer which integer data-id keys follow.
         print >> inputFile, ">intids visit"
         import lsst.daf.persistence as dafPersist
         from lsst.obs.lsstSim import LsstSimMapper
         butler = dafPersist.ButlerFactory(mapper=LsstSimMapper(
             root=self.inputDirectory)).create()
         numInputs = 0
         for sensorRef in butler.subset("raw", "sensor"):
             # Count how many of this sensor's channel files actually exist.
             numChannels = 0
             for channelRef in sensorRef.subItems():
                 if butler.datasetExists("raw", channelRef.dataId):
                     numChannels += 1
             id = "visit=%(visit)d raft=%(raft)s sensor=%(sensor)s" % \
                     sensorRef.dataId
             # Only sensors with all 32 channels are usable inputs.
             if numChannels == 32:
                 print >> inputFile, "raw", id
                 numInputs += 1
                 if numInputs >= self.options.ccdCount:
                     break
             else:
                 print >>sys.stderr, "Warning:", id, \
                         "has %d channel files (should be 32);" % \
                         (numChannels,), "not processing"
         # One placeholder entry per pipeline so every pipeline gets work.
         for i in xrange(self.nPipelines):
             print >> inputFile, "raw visit=0 raft=0 sensor=0"
 def testButlerQueryMetadata(self):
     """queryMetadata must match on full keyword sets and reject values
     that are out of range.
     """
     butler = dafPersist.ButlerFactory(mapper=MinMapper2(root=ROOT)).create()
     dataId = dict(ccd=35,
                   filter="r",
                   visit=787731,
                   taiObs="2005-04-02T09:24:49.933440000")
     self.assertEqual(butler.queryMetadata("other", "visit", **dataId),
                      [787731])
     self.assertEqual(
         butler.queryMetadata("other",
                              "visit",
                              visit=dataId["visit"],
                              ccd=dataId["ccd"],
                              taiObs=dataId["taiObs"],
                              filter=dataId["filter"]), [787731])
     # now test we get no matches if ccd is out of range
     self.assertEqual(
         butler.queryMetadata("raw",
                              "ccd",
                              ccd=36,
                              filter="r",
                              visit=787731), [])
 def csvAll(self, sql=None):
     """Extract/compute metadata for all single frame exposures matching
     at least one data ID specification, and store it in CSV files.

     @param sql  optional object providing getConn(); when supplied, a
                 cursor on that connection is handed to toCsv for each
                 dataset, otherwise None is passed
     """
     # Write column name header line for calexp metadata CSV
     self.mdFile.write('scienceCcdExposureId', 'metadataKey',
                       'exposureType', 'intValue', 'doubleValue',
                       'stringValue')
     conn = sql.getConn() if sql else None
     cursor = conn.cursor() if conn else None
     # Loop over input roots
     for root in self.namespace.inroot:
         print('Ingesting from ' + root)
         # Prefer an explicitly supplied registry; fall back to the SQLite
         # registry conventionally located at the repository root.
         if hasattr(self.namespace, 'registry'):
             registry = self.namespace.registry
         else:
             registry = os.path.join(root, 'registry.sqlite3')
         cls = getMapperClass(self.camera)
         cameraMapper = cls(root=root, registry=registry)
         butler = dafPersistence.ButlerFactory(mapper=cameraMapper).create()
         scanner = DatasetScanner(dataset='calexp',
                                  camera=self.camera,
                                  cameraMapper=cameraMapper)
         # scan the root for matching calexps
         for path, dataId in scanner.walk(root, self.namespace.rules):
             self.toCsv(butler, root, path, dataId, cursor)
     if cursor:
         cursor.close()
     if conn:
         conn.close()
     # Flush all output writers; the polygon file is also closed here.
     self.expFile.flush()
     self.mdFile.flush()
     self.polyFile.flush()
     self.polyFile.close()
Example #5
0
    def __init__(
            self,  # ReadWrite
            mappers,  # Mapper or mapper class to use
            ccdKeys,  # Data keywords required to specify a CCD
            fileKeys=None,  # Data keywords required to specify a file
            config=None,  # Configuration
    ):
        """Initialisation

        Builds separate input and output butlers from the supplied
        mapper(s).

        @param mappers Data mapper (class or instance) for persistence, or
                       a two-element list [inMapper, outMapper]
        @param ccdKeys Data keywords required to specify a CCD
        @param fileKeys Data keywords required to specify a file; defaults
                        to a copy of ccdKeys
        @param config Configuration (for instantiating mapper)
        """

        # if we got a list, it contains [inMapper, outMapper]
        if isinstance(mappers, list) and len(mappers) == 2:
            inMapper, outMapper = mappers
        # if we got a mapper, use it for both input and output
        elif (isinstance(mappers, dafPersist.Mapper)
              or issubclass(mappers, dafPersist.Mapper)):
            inMapper, outMapper = mappers, mappers
        # punt
        else:
            raise RuntimeError(
                "'mapper' must be a dafPersist.Mapper (or derived from), or a list containing two of them (in and out)."
            )

        self.log = pexLog.Log(pexLog.getDefaultLog(), "ReadWrite")

        # Input side: mapper, factory, butler.
        self.inMapper = initMapper(inMapper, config, self.log, inMap=True)
        self.ibf = dafPersist.ButlerFactory(mapper=self.inMapper)
        self.inButler = self.ibf.create()

        # Output side: mapper, factory, butler.
        self.outMapper = initMapper(outMapper, config, self.log, inMap=False)
        self.obf = dafPersist.ButlerFactory(mapper=self.outMapper)
        self.outButler = self.obf.create()

        self.ccdKeys = ccdKeys
        if fileKeys is None:
            fileKeys = list(ccdKeys)
        # Accept a single keyword as a convenience; normalize to a list.
        if isinstance(fileKeys, basestring):
            fileKeys = [fileKeys]
        self.fileKeys = fileKeys
        return
Example #6
0
 def test_read_defects(self):
     """read_all_defects should report one sensor with two validity
     ranges of four defects each for the trivial camera.
     """
     butler = dafPersist.ButlerFactory(mapper=self.mapper).create()
     cam = butler.get('camera')
     defects = read_all_defects(
         os.path.join(ROOT, 'trivial_camera', 'defects'), cam)
     sensors = list(defects)
     self.assertEqual(len(sensors), 1)  # One sensor
     for sensor in sensors:
         byDate = defects[sensor]
         self.assertEqual(len(byDate), 2)  # Two validity ranges
         for date in byDate:
             self.assertEqual(len(byDate[date]), 4)  # Four defects
Example #7
0
    def __init__(self, root, registry=None, compress=True):
        """Set up a CFHT butler and the CSV/TSV output files.

        @param root      repository root directory
        @param registry  registry path; defaults to root/registry.sqlite3
        @param compress  compress the CSV output files?
        """
        if registry is None:
            registry = os.path.join(root, "registry.sqlite3")
        self.mapper = CfhtMapper(root=root, registry=registry)
        self.butler = dafPersist.ButlerFactory(mapper=self.mapper).create()

        # One writer per output table, plus a raw TSV for polygons.
        self.expFile = CsvFileWriter("Raw_Amp_Exposure.csv",
                                     compress=compress)
        self.mdFile = CsvFileWriter("Raw_Amp_Exposure_Metadata.csv",
                                    compress=compress)
        self.rToSFile = CsvFileWriter("Raw_Amp_To_Science_Ccd_Exposure.csv",
                                      compress=compress)
        self.polyFile = open("Raw_Amp_Exposure_Poly.tsv", "wb")
Example #8
0
    def testFilter(self):
        """Test that the same (patched) filter is returned through all
        Butler retrieval paths.
        """
        butler = dafPersist.ButlerFactory(mapper=MinMapper2(root=ROOT)).create()
        image = butler.get("someExp", ccd=35)
        filterLabel = butler.get("someExp_filterLabel", ccd=35)
        expected = afwImage.FilterLabel(band="r", physical="r.MP9601")
        # Test only valid with a complete filter
        self.assertEqual(image.getFilterLabel(), expected)
        # Datasets should give consistent answers
        self.assertEqual(filterLabel, image.getFilterLabel())
Example #9
0
 def generateInputList(self):
     """Write a "ccdlist" input file for the SDSS pipeline.

     Lists one "raw" line per fpC frame, up to self.options.ccdCount
     frames, followed by one dummy entry per pipeline.
     """
     with open("ccdlist", "w") as inputFile:
         # Header telling the consumer which integer data-id keys follow.
         print >> inputFile, ">intids run camcol field"
         import lsst.daf.persistence as dafPersist
         from lsst.obs.sdss import SdssMapper
         butler = dafPersist.ButlerFactory(mapper=SdssMapper(
             root=self.inputDirectory)).create()
         numInputs = 0
         for frameRef in butler.subset("fpC", "filter"):
             print >>inputFile, "raw", \
                     "run=%(run)d filter=%(filter)s camcol=%(camcol)d field=%(field)d" % \
                     frameRef.dataId
             numInputs += 1
             if numInputs >= self.options.ccdCount:
                 break
         # One placeholder entry per pipeline so every pipeline gets work.
         for i in xrange(self.nPipelines):
             print >> inputFile, "raw run=0 filter=0 camcol=0 field=0"
    def testGzImage(self):
        """Map and read a gzip-compressed image, full-frame and subimage."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someGz", dict(ccd=35))
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(),
                         [os.path.join("gz", "bar-35.fits.gz")])

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someGz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        # A LOCAL-origin subimage read must honour the bounding box.
        subBox = geom.BoxI(geom.Point2I(200, 100), geom.Extent2I(300, 400))
        subImage = butler.get("someGz_sub",
                              ccd=35,
                              bbox=subBox,
                              imageOrigin="LOCAL",
                              immediate=True)
        self.assertEqual(subImage.getHeight(), 400)
        self.assertEqual(subImage.getWidth(), 300)
Example #11
0
 def testHdu(self):
     """Reading individual HDUs yields the expected planes and pixels."""
     mapper = MinMapper2()
     butler = dafPersist.ButlerFactory(mapper=mapper).create()
     # HDU INT_MIN returns primary array (skipping empty PDU);
     # HDU 0 returns the (header-only) PDU;
     # HDUs 1, 2, 3 return the image, mask and variance planes.
     expectedAt25 = (0.0, 20.0, 0.0)
     expectedAt26 = (1.20544, 0.0, 5.82185)
     for hdu in (1, 2, 3):
         loc = mapper.map("other", dict(ccd=35, hdu=hdu))
         self.assertEqual(loc.getStorage().root, ROOT)
         self.assertEqual(loc.getLocations(), ["bar-35.fits[%d]" % (hdu,)])
         image = butler.get("other", ccd=35, hdu=hdu, immediate=True)
         self.assertIsInstance(image, lsst.afw.image.ImageF)
         self.assertEqual(image.getHeight(), 2024)
         self.assertEqual(image.getWidth(), 2248)
         self.assertEqual(image[200, 25, lsst.afw.image.LOCAL],
                          expectedAt25[hdu - 1])
         self.assertAlmostEqual(image[200, 26, lsst.afw.image.LOCAL],
                                expectedAt26[hdu - 1],
                                places=5)
    def testImage(self):
        """Map and read an uncompressed image, full-frame and subimage."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("some", dict(ccd=35))
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), ["bar-35.fits"])

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("some", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        # The bbox dataset must agree with the image's own bounding box.
        self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())

        subBox = afwGeom.BoxI(afwGeom.Point2I(200, 100),
                              afwGeom.Extent2I(300, 400))
        subImage = butler.get("some_sub",
                              ccd=35,
                              bbox=subBox,
                              imageOrigin="LOCAL",
                              immediate=True)
        self.assertEqual(subImage.getHeight(), 400)
        self.assertEqual(subImage.getWidth(), 300)
Example #13
0
    def testEndToEnd(self):
        """Test ISR, CcdAssembly, CrSplit, ImgChar, SFM pipelines"""

        #Setup up astrometry_net_data
        # Note - one of datarel's dependencies causes setup of
        #        'astrometry_net_data cfhttemplate' version; 
        #        datarel needs imsim_*.
        ver = 'imsim-2010-12-17-1'
        print "Setting up astrometry_net_data", ver
        # XXX what is actually used from this setup -- a path in the env?
        ok, version, reason = eups.Eups().setup("astrometry_net_data", versionName=ver)
        if not ok:
            raise ValueError("Couldn't set up version '%s' of astrometry_net_data: %s" % (ver, reason))

        afwdataDir = lsst.utils.getPackageDir("afwdata")
        inputRoot = os.path.join(afwdataDir, "ImSim")
        if os.path.exists("endToEnd.py"):
            outputRoot = "."
        else:
            outputRoot = "tests"

        registryPath = os.path.join(inputRoot, "registry.sqlite3")

        bf = dafPersist.ButlerFactory(mapper=LsstSimMapper(root=inputRoot))
        inButler = bf.create()
        obf = dafPersist.ButlerFactory(mapper=LsstSimMapper(root=outputRoot,
            registry=registryPath))
        outButler = obf.create()

        stat = subprocess.call(["runImSim.py", "-T", "--force",
            "-i", inputRoot, "-o", outputRoot,
            "-v", "85408556", "-r", "2,3", "-s", "1,1"])
        self.assertEqual(stat, 0, "Error while running end to end test")

        fname = "psf/v85408556-fr/R23/S11.boost"
        stat = subprocess.call(["cmp",
            os.path.join(outputRoot, fname), os.path.join(inputRoot, fname)])

        psfDiffers = (stat != 0)
        if psfDiffers:
            print 'PSF differs (but carrying on and failing later...)'

        results = []
        
        for datasetType in ("icSrc", "src", "calexp"):
            msg = compare(outButler, inButler, datasetType,
                    visit=85408556, raft="2,3", sensor="1,1")
            results.append((datasetType, msg))
            if msg is not None:
                print 'Dataset type', datasetType, 'differs (but carrying on and failing later...)'
                print 'message:', msg

        for snap in (0, 1):
            msg = compare(outButler, inButler, "sdqaCcd",
                visit=85408556, snap=snap, raft="2,3", sensor="1,1")
            results.append(('sdqaCcd snap %i' % snap, msg))
            if msg is not None:
                print 'Snap', snap, 'sdqaCCD differs (but carrying on and failing later...)'
                print 'message:', msg
            for channel in inButler.queryMetadata("raw", "channel"):
                msg = compare(outButler, inButler, "sdqaAmp",
                    visit=85408556, snap=snap, raft="2,3", sensor="1,1",
                    channel=channel)
                print 'channel:', channel
                results.append(('sdqaAmp snap %i channel ' % (snap) + str(channel), msg))
                if msg is not None:
                    print 'Snap', snap, 'channel', channels, 'sdqaAmp differs (but carrying on and failing later...)'
                    print 'message:', msg

        # Deferred failure!
        self.assertFalse(psfDiffers)
        for datasetType,msg in results:
            self.assert_(msg is None, msg)
Example #14
0
def getButler(datadir, mit=False):
    """Create a butler for Suprime-Cam test data under ``datadir``.

    @param datadir  directory holding "science" and "calib" subdirectories
    @param mit      use the MIT detector configuration?
    """
    mapper = SuprimecamMapper(mit=mit,
                              root=os.path.join(datadir, "science"),
                              calibRoot=os.path.join(datadir, "calib"))
    return dafPersist.ButlerFactory(mapper=mapper).create()
 def testDetector(self):
     """The raw_detector dataset for ccd 0 should be named "ccd00"."""
     butler = dafPersist.ButlerFactory(mapper=MinMapper2(root=ROOT)).create()
     detector = butler.get("raw_detector", ccd=0)
     self.assertEqual(detector.getName(), "ccd00")
Example #16
0
def getButler(datadir):
    """Create a butler for the HSC simulation data under ``datadir``/hsc."""
    mapper = HscSimMapper(root=os.path.join(datadir, "hsc"))
    return dafPersist.ButlerFactory(mapper=mapper).create()
Example #17
0
def _getSuprimeButler(rootdir=None, calibdir=None, outrootdir=None):
    """Create a Suprime-Cam butler from the mapper built by
    _getSuprimeMapper for the given directories.
    """
    mapper = _getSuprimeMapper(rootdir, calibdir, outrootdir)
    return dafPersist.ButlerFactory(mapper=mapper).create()
Example #18
0
def main():
    """Verify the sky-tile to raw amp mapping recorded in a butler
    registry against the WCS information stored in a run database.
    """
    # Setup command line options
    usage = dedent("""\
    usage: %prog [options] <kind> <db> <inputRoot>

    Verifies sky-tile to raw amp mapping in registry

    <kind>:       Input dataset, one of 'imsim' or 'cfhtls'
    <db>:         Run database name
    <inputRoot>:  Input root
    """)
    parser = optparse.OptionParser(usage)
    parser.add_option(
        "-u",
        "--user",
        dest="user",
        default="serge",
        help="Database user name to use when connecting to MySQL.")
    parser.add_option(
        "-s",
        "--server",
        dest="server",
        default="lsst10.ncsa.uiuc.edu:3306",
        help="host:port of MySQL server to connect to; defaults to %default")
    parser.add_option("-r",
                      "--registry",
                      dest="registry",
                      default=None,
                      help="Input registry for butler")
    opts, args = parser.parse_args()
    if len(args) != 3:
        parser.error("Invalid number of arguments")
    kind = args[0].lower()
    if kind not in ('imsim', 'cfhtls'):
        parser.error("Input dataset must be one of 'imsim' or 'cfhtls'")
    # Fall back to the SQLite registry at the input root if none was given.
    registry = opts.registry or os.path.join(args[2], "registry.sqlite3")
    if kind == 'imsim':
        mapper = LsstSimMapper(root=args[2], calibRoot=None, registry=registry)
    else:
        mapper = CfhtMapper(root=args[2], calibRoot=None, registry=registry)
    inButler = dafPersist.ButlerFactory(mapper=mapper).create()
    # Prompt for the MySQL password rather than taking it on the command line.
    passwd = getpass.getpass()
    host, port = hostPort(opts.server)
    qsp = qs.createQuadSpherePixelization()
    conn = sql.connect(host=host,
                       port=port,
                       user=opts.user,
                       passwd=passwd,
                       db=args[1])
    # Nested try/finally guarantees both cursor and connection are closed.
    try:
        cursor = conn.cursor()
        try:
            print "Reading WCSes for all Science CCDs in run"
            wcsMap, wcsList = getAllSipWcs(cursor, qsp, kind)
            print "Verifying sky-tile mapping in input registry :"
            print "=============================================="
            verifySkyTiles(cursor, qsp, kind, wcsMap, wcsList, inButler)
        finally:
            cursor.close()
    finally:
        conn.close()
Example #19
0
 def setUp(self):
     """Create an SDSS butler (keeping its factory) rooted at the cwd."""
     factory = dafPersist.ButlerFactory(mapper=SdssMapper(root="."))
     self.bf = factory
     self.butler = factory.create()
    #if name == 'lsst.meas.algorithms.measurement':
    #    di.display = True
    if name == '__main__':
        #di.display = True
        pass
    return di


# Install the custom debug-info hook so lsstDebug queries route through it.
lsstDebug.Info = MyInfo

if __name__ == '__main__':
    # Repository layout: rerun outputs plus a shared CALIB directory under
    # ~/lsst/ACT-data.
    basedir = os.path.join(os.environ['HOME'], 'lsst', 'ACT-data')
    mapperArgs = dict(root=os.path.join(basedir, 'rerun/dstn'),
                      calibRoot=os.path.join(basedir, 'CALIB'))
    mapper = obsSc.SuprimecamMapper(**mapperArgs)
    butlerFactory = dafPersist.ButlerFactory(mapper=mapper)
    butler = butlerFactory.create()
    print 'Butler', butler
    dataRef = butler.subset('raw', dataId=dict(visit=126969, ccd=5))
    print 'dataRef:', dataRef
    #dataRef.butlerSubset = dataRef
    #print 'dataRef:', dataRef
    print 'len(dataRef):', len(dataRef)
    for dr in dataRef:
        print '  ', dr

    # Configure single-CCD processing: detection and measurement enabled,
    # with other sources removed during measurement.
    conf = procCcd.ProcessCcdConfig()
    conf.measurement.doRemoveOtherSources = True

    conf.doDetection = True
    conf.doMeasurement = True
Example #21
0
def main(dataDir,
         visit,
         title="",
         outputTxtFileName=None,
         showFwhm=False,
         minFwhm=None,
         maxFwhm=None,
         correctDistortion=False,
         showEllipticity=False,
         ellipticityDirection=False,
         showNdataFwhm=False,
         showNdataEll=False,
         minNdata=None,
         maxNdata=None,
         gridPoints=30,
         verbose=False):
    """Plot seeing-map quantities (FWHM, ellipticity, or sample counts)
    over the HSC focal plane for one visit.

    Reads every tableSeeingMap dataset for ``visit`` from an HscSim butler
    rooted at ``dataDir``, converts per-CCD pixel positions to focal-plane
    mm, renders the view selected by the ``show*`` flags (FWHM map by
    default), and optionally writes the raw and gridded values to text
    files based on ``outputTxtFileName``.  Returns the ``plt`` module so
    the caller can show or save the figure.
    """

    butler = dafPersist.ButlerFactory(mapper=hscSim.HscSimMapper(
        root=dataDir)).create()
    camera = butler.get("camera")

    # Default to the FWHM view when no display or output was requested.
    if not (showFwhm or showEllipticity or showNdataFwhm or showNdataEll
            or outputTxtFileName):
        showFwhm = True
    #
    # Get a dict of cameraGeom::Ccd indexed by serial number
    #
    ccds = {}
    for raft in camera:
        for ccd in raft:
            ccd.setTrimmed(True)
            ccds[ccd.getId().getSerial()] = ccd
    #
    # Read all the tableSeeingMap files, converting their (x, y) to focal plane coordinates
    #
    xArr = []
    yArr = []
    ellArr = []
    fwhmArr = []
    paArr = []
    aArr = []
    bArr = []
    e1Arr = []
    e2Arr = []
    elle1e2Arr = []
    for tab in butler.subset("tableSeeingMap", visit=visit):
        # we could use tab.datasetExists() but it prints a rude message
        fileName = butler.get("tableSeeingMap_filename", **tab.dataId)[0]
        if not os.path.exists(fileName):
            continue

        with open(fileName) as fd:
            ccd = None
            for line in fd.readlines():
                if re.search(r"^\s*#", line):
                    continue
                fields = [float(_) for _ in line.split()]

                # First column is the CCD serial number; resolve it once.
                if ccd is None:
                    ccd = ccds[int(fields[0])]

                x, y, fwhm, ell, pa, a, b = fields[1:8]
                x, y = ccd.getPositionFromPixel(afwGeom.PointD(x, y)).getMm()
                xArr.append(x)
                yArr.append(y)
                ellArr.append(ell)
                fwhmArr.append(fwhm)
                paArr.append(pa)
                aArr.append(a)
                bArr.append(b)
                # Optional trailing columns: e1, e2 and their ellipticity;
                # use -9999. as the missing-value sentinel.
                if len(fields) == 11:
                    e1 = fields[8]
                    e2 = fields[9]
                    elle1e2 = fields[10]
                else:
                    e1 = -9999.
                    e2 = -9999.
                    elle1e2 = -9999.
                e1Arr.append(e1)
                e2Arr.append(e2)
                elle1e2Arr.append(elle1e2)

    xArr = np.array(xArr)
    yArr = np.array(yArr)
    ellArr = np.array(ellArr)
    fwhmArr = np.array(fwhmArr) * 0.168  # arcseconds
    paArr = np.radians(np.array(paArr))
    aArr = np.array(aArr)
    bArr = np.array(bArr)

    e1Arr = np.array(e1Arr)
    e2Arr = np.array(e2Arr)
    elle1e2Arr = np.array(elle1e2Arr)

    if correctDistortion:
        import lsst.afw.geom.ellipses as afwEllipses

        # Transform each ellipse through the camera distortion model and
        # recompute the ellipticity from the corrected axes.
        dist = camera.getDistortion()
        for i in range(len(aArr)):
            axes = afwEllipses.Axes(aArr[i], bArr[i], paArr[i])
            if False:  # testing only!
                axes = afwEllipses.Axes(1.0, 1.0, np.arctan2(yArr[i], xArr[i]))
            quad = afwEllipses.Quadrupole(axes)
            quad = quad.transform(
                dist.computeQuadrupoleTransform(
                    afwGeom.PointD(xArr[i], yArr[i]), False))
            axes = afwEllipses.Axes(quad)
            aArr[i], bArr[i], paArr[i] = axes.getA(), axes.getB(
            ), axes.getTheta()

        ellArr = 1 - bArr / aArr

    if len(xArr) == 0:
        gridPoints = 0
        xs, ys = [], []
    else:
        # A complex step count makes np.mgrid behave like linspace.
        N = gridPoints * 1j
        extent = [min(xArr), max(xArr), min(yArr), max(yArr)]
        xs, ys = np.mgrid[extent[0]:extent[1]:N, extent[2]:extent[3]:N]

    title = [
        title,
    ]

    title.append("\n#")

    if outputTxtFileName:
        f = open(outputTxtFileName, 'w')
        f.write("# %s visit %s\n" % (" ".join(title), visit))
        for x, y, ell, fwhm, pa, a, b, e1, e2, elle1e2 in zip(
                xArr, yArr, ellArr, fwhmArr, paArr, aArr, bArr, e1Arr, e2Arr,
                elle1e2Arr):
            f.write('%f %f %f %f %f %f %f %f %f %f\n' %
                    (x, y, ell, fwhm, pa, a, b, e1, e2, elle1e2))

    if showFwhm:
        title.append("FWHM (arcsec)")
        if len(xs) > 0:
            fwhmResampled = griddata(xArr, yArr, fwhmArr, xs, ys)
            plt.imshow(fwhmResampled.T,
                       extent=extent,
                       vmin=minFwhm,
                       vmax=maxFwhm,
                       origin='lower')
            plt.colorbar()

        if outputTxtFileName:

            ndataGrids = getNumDataGrids(xArr, yArr, fwhmArr, xs, ys)

            f = open(outputTxtFileName + '-fwhm-grid.txt', 'w')
            f.write("# %s visit %s\n" % (" ".join(title), visit))
            for xline, yline, fwhmline, ndataline in zip(
                    xs.tolist(), ys.tolist(), fwhmResampled.tolist(),
                    ndataGrids):
                for xx, yy, fwhm, ndata in zip(xline, yline, fwhmline,
                                               ndataline):
                    if fwhm is None:
                        fwhm = -9999
                    f.write('%f %f %f %d\n' % (xx, yy, fwhm, ndata))

    elif showEllipticity:
        title.append("Ellipticity")
        scale = 4

        if ellipticityDirection:  # we don't care about the magnitude
            ellArr = 0.1

        u = -ellArr * np.cos(paArr)
        v = -ellArr * np.sin(paArr)
        if gridPoints > 0:
            u = griddata(xArr, yArr, u, xs, ys)
            v = griddata(xArr, yArr, v, xs, ys)
            x, y = xs, ys
        else:
            x, y = xArr, yArr

        # Headless quiver: segments show orientation, not direction.
        Q = plt.quiver(
            x,
            y,
            u,
            v,
            scale=scale,
            pivot="middle",
            headwidth=0,
            headlength=0,
            headaxislength=0,
        )
        keyLen = 0.10
        if not ellipticityDirection:  # we care about the magnitude
            plt.quiverkey(Q, 0.20, 0.95, keyLen, "e=%g" % keyLen, labelpos='W')

        if outputTxtFileName:
            ndataGrids = getNumDataGrids(xArr, yArr, ellArr, xs, ys)

            f = open(outputTxtFileName + '-ell-grid.txt', 'w')
            f.write("# %s visit %s\n" % (" ".join(title), visit))
            #f.write('# %f %f %f %f %f %f %f\n' % (x, y, ell, fwhm, pa, a, b))
            for xline, yline, uline, vline, ndataline in zip(
                    x.tolist(), y.tolist(), u.tolist(), v.tolist(),
                    ndataGrids):
                for xx, yy, uu, vv, ndata in zip(xline, yline, uline, vline,
                                                 ndataline):
                    if uu is None:
                        uu = -9999
                    if vv is None:
                        vv = -9999
                    f.write('%f %f %f %f %d\n' % (xx, yy, uu, vv, ndata))

    elif showNdataFwhm:
        title.append("N per fwhm grid")
        if len(xs) > 0:
            ndataGrids = getNumDataGrids(xArr, yArr, fwhmArr, xs, ys)
            plt.imshow(ndataGrids,
                       interpolation='nearest',
                       extent=extent,
                       vmin=minNdata,
                       vmax=maxNdata,
                       origin='lower')
            plt.colorbar()
        else:
            pass

    elif showNdataEll:
        title.append("N per ell grid")
        if len(xs) > 0:
            ndataGrids = getNumDataGrids(xArr, yArr, ellArr, xs, ys)
            plt.imshow(ndataGrids,
                       interpolation='nearest',
                       extent=extent,
                       vmin=minNdata,
                       vmax=maxNdata,
                       origin='lower')
            plt.colorbar()
        else:
            pass

    #plt.plot(xArr, yArr, "r.")
    #plt.plot(xs, ys, "b.")
    plt.axes().set_aspect('equal')
    plt.axis([-20000, 20000, -20000, 20000])

    def frameInfoFrom(filepath):
        # Summarize observing conditions from the raw file's primary header.
        import pyfits
        with pyfits.open(filepath) as hdul:
            h = hdul[0].header
            'object=ABELL2163 filter=HSC-I exptime=360.0 alt=62.11143274 azm=202.32265181 hst=(23:40:08.363-23:40:48.546)'
            return 'object=%s filter=%s exptime=%.1f azm=%.2f hst=%s' % (
                h['OBJECT'], h['FILTER01'], h['EXPTIME'], h['AZIMUTH'],
                h['HST'])

    # Prepend frame info from ccd 0's raw header and append the median FWHM.
    title.insert(
        0,
        frameInfoFrom(
            butler.get('raw_filename', {
                'visit': visit,
                'ccd': 0
            })[0]))
    title.append(r'$\langle$FWHM$\rangle %4.2f$"' % np.median(fwhmArr))
    plt.title("%s visit=%s" % (" ".join(title), visit), fontsize=9)

    return plt
Example #22
0
def getInputButler(opt):
    """Create a butler reading from opt.inRoot via opt.registry."""
    mapper = LsstSimMapper(root=opt.inRoot, registry=opt.registry)
    return dafPersist.ButlerFactory(mapper=mapper).create()
Example #23
0
def getButler(datadir):
    """Create a DECam butler for the DATA/CALIB tree under ``datadir``."""
    mapper = DecamMapper(root=os.path.join(datadir, "DATA"),
                         calibRoot=os.path.join(datadir, "CALIB"))
    return dafPersist.ButlerFactory(mapper=mapper).create()
Example #24
0
 def setUp(self):
     """Fetch the trivial camera and prepare the qe_curve fixtures."""
     factory = dafPersist.ButlerFactory(mapper=BaseMapper(ROOT))
     self.cam = factory.create().get('camera')
     self.qe_path = os.path.join(ROOT, 'trivial_camera', 'qe_curve')
     self.tmp_dir_obj = tempfile.TemporaryDirectory()
Example #25
0
 def setUp(self):
     """Fetch the trivial camera and locate its defects directory."""
     factory = dafPersist.ButlerFactory(mapper=BaseMapper(ROOT))
     self.cam = factory.create().get('camera')
     self.defects_path = os.path.join(ROOT, 'trivial_camera', 'defects')
Example #26
0
def getOutputButler(opt):
    """Create a butler for writing LSST simulation output data.

    @param opt  options object providing ``outRoot`` (output repository root)
                and ``registry`` (registry file path) attributes
    @return a data butler configured with an LsstSimMapper
    """
    mapper = LsstSimMapper(root=opt.outRoot, registry=opt.registry)
    return dafPersist.ButlerFactory(mapper=mapper).create()
Example #27
0
    def __init__(self, label, rerun, cameraInfo, dataDir, **kwargs):
        """
        @param label A label to refer to the data
        @param rerun The rerun to retrieve
        @param cameraInfo A cameraInfo object describing the camera for these data
        @param dataDir The full path to the directory containing the data registry file.

        @param haveManifest verify files in dataDir are present according to manifest
        @param verifyChecksum verify files in dataDir have correct checksum as listed in manifest
        @param dataId restrict processing to this (possibly regex-valued) dataId
        @param shapeAlg which HSM shape algorithm's measurements to use
        """

        QaData.__init__(self, label, rerun, cameraInfo)
        self.rerun = rerun
        self.dataDir = dataDir

        ###############################################
        # handle keyword args
        ###############################################
        self.kwargs = kwargs
        self.dataId = self.kwargs.get('dataId', {})
        self.haveManifest = self.kwargs.get('haveManifest', False)
        self.verifyChecksum = self.kwargs.get('verifyChecksum', False)
        self.shapeAlg = self.kwargs.get('shapeAlg', 'HSM_REGAUSS')

        # Only the HSM shape measurements are supported; fail fast on a typo.
        knownAlgs = ["HSM_REGAUSS", "HSM_BJ", "HSM_LINEAR", "HSM_SHAPELET", "HSM_KSB"]
        if self.shapeAlg not in knownAlgs:
            knownStr = "\n".join(knownAlgs)
            raise Exception("Unknown shape algorithm: %s.  Please choose: \n%s\n" % (self.shapeAlg, knownStr))

        ###############################################
        # check the manifest, if requested
        # haveManifest = True is a bit slowish
        # verifyChecksum = True is quite slow
        manifest.verifyManifest(self.dataDir, verifyExists=self.haveManifest,
                                verifyChecksum=self.verifyChecksum)

        # This (dataId fetching) needs a better design, but will require butler/mapper change, I think.
        #
        # these obscure things refer to the names assigned to levels in the data hierarchy
        # eg. for lsstSim:   dataInfo  = [['visit',1], ['snap', 0], ['raft',0], ['sensor',0]]
        # a level is considered a discriminator if it represents different pictures of the same thing
        # ... so the same object may appear in multiple 'visits', but not on multiple 'sensors'
        # dataInfo is passed in from the derived class as it's specific to each mapper

        dataIdRegexDict = {}
        for dataIdName, dataIdDiscrim in self.dataInfo:
            # if the user requested eg. visit=1234.*
            # pull that out of kwargs and put it in dataIdRegexDict
            # (fixed: dict.has_key() does not exist in Python 3; 'in' works everywhere)
            if dataIdName in self.dataId:
                dataIdRegexDict[dataIdName] = self.dataId[dataIdName]

        #######################################
        # get butler
        self.outMapper = self.cameraInfo.getMapper(self.dataDir, rerun=self.rerun)
        self.outButler = dafPersist.ButlerFactory(mapper=self.outMapper).create()

        ####################################################
        # make a list of the frames we're asked to care about

        # get all the available raw inputs
        self.availableDataTuples = self.outButler.queryMetadata(cameraInfo.rawName, self.dataIdNames,
                                                                format=self.dataIdNames)

        # availableDataTuples may be a *very* *large* list.  Be sure to call reduceAvailableDataTupleList
        self.dataTuples = self.availableDataTuples
    def __init__(self, label, rerun, mapperClass, dataInfo, defaultConfig, kwargs):
        """
        Set up output directories, verify the data manifest, construct input and
        output butlers, and build the list of data tuples to process.

        @param label          A label used to refer to this data set; also names the
                              local output directory <cwd>/<label>out
        @param rerun          The rerun to retrieve (only suprime/hsc mappers accept it)
        @param mapperClass    Mapper class used to construct the input/output butlers
        @param dataInfo       List of [dataIdName, discriminator] pairs specific to the mapper
        @param defaultConfig  Dict whose 'roots' entry provides 'output', 'data' and
                              'calib' directory paths
        @param kwargs         Dict (not **kwargs) of keyword options, see below

        keyword args:
        haveManifest = boolean, verify files in dataDir are present according to manifest
        verifyChecksum = boolean, verify files in dataDir have correct checksum as listed in manifest
        astrometryNetData = eups package name for astrometryNetData package to use
        """

        ###############################################
        # handle inputs
        ###############################################
        self.label         = label
        self.dataIdNames   = []
        self.dataIdDiscrim = []
        self.defaultConfig = defaultConfig
        roots = self.defaultConfig['roots']

        self.kwargs         = kwargs
        self.dataIdRegex    = self.kwargs.get('dataId', {})
        self.haveManifest   = self.kwargs.get('haveManifest', False)
        self.verifyChecksum = self.kwargs.get('verifyChecksum', False)
        self.astrometryNetData = self.kwargs.get('astrometryNetData', None)

        self.rerun = rerun

        ##################
        # output directory

        # if the user provided one, use it ... otherwise use the default
        self.outDir = kwargs.get('outDir', roots['output'])
        self.testdataDir = roots['data']
        self.calibDir    = roots['calib']

        # need a separate output dir for things we generate outside the pipe (logs, etc)
        self.localOutDir = os.path.join(os.getcwd(), self.label+"out")
        if not os.path.exists(self.localOutDir):
            os.mkdir(self.localOutDir)

        # allow a short hand for 'write outputs locally' ... use the word 'local'
        # NOTE: the regex also fires on any outDir containing a '.' (e.g. a
        # relative path), which likewise redirects output to localOutDir
        if re.search("(local|\.)", self.outDir):
            self.outDir = self.localOutDir
        if not os.path.exists(self.outDir):
            os.mkdir(self.outDir)
        # propagate the possibly-overridden output dir back into the shared config
        roots['output'] = self.outDir


        # This (dataId fetching) needs a better design, but will require butler/mapper change, I think.
        #
        # these obscure things refer to the names assigned to levels in the data hierarchy
        # eg. for lsstSim:   dataInfo  = [['visit',1], ['snap', 0], ['raft',0], ['sensor',0]]
        # a level is considered a discriminator if it represents different pictures of the same thing
        # ... so the same object may appear in multiple 'visits', but not on multiple 'sensors'
        # dataInfo is passed in from the derived class as it's specific to each mapper
        for array in dataInfo:
            dataIdName, dataIdDiscrim = array
            self.dataIdNames.append(dataIdName)
            self.dataIdDiscrim.append(dataIdDiscrim)

        # keep a list of any:
        #  - logfiles we write,
        #  - eups setup list files
        # ...so we can provide it to be imported by a TestSet
        self.logDir = os.path.join(self.localOutDir, "log")
        self.logFiles = []
        self.eupsSetupFiles = []



        ##########################
        # load the manifest and verify the checksum (if we're asked to ... it's slower)
        # haveManifest = True is a bit slowish
        # verifyChecksum = True is quite slow
        if self.haveManifest:
            manifest = Manifest(self.testdataDir)
            manifest.read()
            missingInputs   = manifest.verifyExists()
            # failedChecksums is only bound here; the check below re-tests
            # self.verifyChecksum before reading it, so this is safe
            if self.verifyChecksum:
                failedChecksums = manifest.verifyChecksum()

            msg = ""
            if (len(missingInputs) > 0):
                msg = "Missing input files listed in manifest:\n"
                msg += "\n".join(missingInputs) + "\n"
            if self.verifyChecksum and (len(failedChecksums) > 0):
                msg += "Failed checksums:\n"
                msg += "\n".join(failedChecksums) + "\n"
            if len(msg) > 1:
                raise Exception(msg)



        #########################
        # see if setup changed
        # we should rerun our data if the user has setup against different packages
        # TODO: setup-change detection is not implemented; this only warns
        print "Warning: Setup change verification not yet implemented."


        #######################################
        # get i/o butlers
        registry = os.path.join(self.testdataDir, 'registry.sqlite3')
        # note: only suprime/hsc mappers accept rerun arg
        if self.rerun is None:
            self.inMapper  = mapperClass(root=self.testdataDir, calibRoot=self.calibDir)
            self.outMapper = mapperClass(root=self.outDir, calibRoot=self.calibDir, registry=registry)
        else:
            self.inMapper  = mapperClass(rerun=self.rerun, root=self.testdataDir, calibRoot=self.calibDir)
            self.outMapper = mapperClass(rerun=self.rerun,
                                         root=self.outDir, calibRoot=self.calibDir, registry=registry)
        self.inButler  = dafPersist.ButlerFactory(mapper=self.inMapper).create()
        self.outButler = dafPersist.ButlerFactory(mapper=self.outMapper).create()


        ####################################################
        # make a list of the frames we're asked to care about

        # get all the available raw inputs
        self.availableDataTuples = self.inButler.queryMetadata('raw', self.dataIdNames,
                                                               format=self.dataIdNames)

        # of the data available, get a list of the ones the user actually wants us
        #  to run.  A bit sketchy here ... kwargs contains non-idname info as well.
        self.dataTuples = self._regexMatchDataIds(self.dataIdRegex, self.availableDataTuples)


        # if/when we run, we'll store tracebacks for any failed runs
        # we don't want to stop outright, but we should report failures
        self.uncaughtExceptionDict = {}
Example #29
0
def getButler(datadir):
    """Create a butler for the WHIRC test data rooted at ``datadir``.

    Raw images live in <datadir>/raw and calibration products in
    <datadir>/calib.
    """
    factory = dafPersist.ButlerFactory(
        mapper=WhircMapper(root=os.path.join(datadir, "raw"),
                           calibRoot=os.path.join(datadir, "calib")))
    return factory.create()