def doQueryType(t, curMD, tNum, cSize=cSize, obsId=obsId):
    print 'NOW QUERYING:', t
    myQDB = queryDB.queryDB(chunksize=cSize, objtype=t)
    ic = myQDB.getInstanceCatalogById(obsId)
    while ic is not None:
        curMD = doIC(ic, myQDB, curMD, tNum)
        tNum += 1
        ic = myQDB.getNextChunk()
    return curMD, tNum, myQDB
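
A minimal driver for doQueryType might look like the sketch below. It assumes, as the defaults above require, that cSize and obsId exist at module scope, that doIC is the per-chunk processing hook from the same module and accepts None as the initial metadata, and that the list of query types is a placeholder.

# Hypothetical driver loop for doQueryType (sketch only).
queryTypes = ['MSSTARS', 'GALAXY_BULGE', 'GALAXY_DISK']
curMD = None
tNum = 0
for t in queryTypes:
    curMD, tNum, myQDB = doQueryType(t, curMD, tNum)
    myQDB.closeSession()  # release the DB session once this type is exhausted
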
def runDia(csize, obsidList, radius=2.1, outdir='.', repodir=None, je=None, compress=True, cleanup=False):
    if repodir is None:
        repodir = outdir
    meta = None
    opsimid = None
    writeJobEvent(je, 'start')
    cattype = "DIASOURCE"
    objtype = 'SSM'
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "diaBatchStarting_%i"%(obsidList[0])
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    if not os.path.exists(outBase):
        os.makedirs(outBase)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s'%(outBase))
    nid = 0
    for obsid in obsidList:
        writeJobEvent(je, 'Obshistid:%s'%(obsid), 'Doing %i out of %i total'%(nid, len(obsidList)))
        filename = "dia_%i.dat"%(obsid)
        outfile = os.path.join(outBase,filename)
        myqdb = queryDB.queryDB(chunksize=csize,objtype=objtype,filetypes=(cattype,))
        ic = myqdb.getInstanceCatalogById(obsid, radiusdeg=radius)        
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(je, 'GetChunk', 'Got chunk #%i of length %i'%(cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.metadata.validateMetadata(cattype, opsimid)
                ic.metadata.writeMetadata(outfile, cattype, opsimid, newfile=True, filelist=None, compress=compress)
                writeJobEvent(je, 'WriteMetadata', 'Wrote metadata to %s'%(outfile))
            ic.validateData(cattype)
            ic.writeCatalogData(outfile, cattype, newfile = False, compress=compress)
            writeJobEvent(je, 'WriteChunk', 'Wrote chunk #%i of length %i'%(cnum,numRec))
            ic = myqdb.getNextChunk()
            cnum += 1
        myqdb.closeSession()
        nid += 1
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')
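
A sketch of how runDia might be invoked for a short list of pointings; the second obshistid and the output path are placeholders, while 85748128 is the id used in the test scripts further down.

# Hypothetical invocation of runDia; the path and second id are placeholders.
runDia(csize=100000, obsidList=[85748128, 85748129], radius=2.1,
       outdir='/tmp/dia_out', compress=True, cleanup=True)
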
def runCalib(csize, obsid, radius=2.1, outdir='.', repodir=None, je=None, compress=True, cleanup=False):
    if repodir is None:
        repodir = outdir
    meta = None
    opsimid = None
    files = []
    writeJobEvent(je, 'start')
    cattype = "CALIB"
    print "Warning -- only MSSTARS in calib cats currently"
    #objtypes = ['MSSTARS','WDSTARS','BHBSTARS','RRLYSTARS', \
    #            'GLENS','IMAGE','EBSTARS','CEPHEIDSTARS','EASTEREGGS','GALAXY_BULGE','GALAXY_DISK','AGN']
    #varobj = ['MSSTARS', 'RRLYSTARS', 'AGN', 'IMAGE', 'WDSTARS', 'EBSTARS', 'CEPHEIDSTARS']    
    objtypes = ['MSSTARS', ]
    varobj = ['MSSTARS',]
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "obsid%i"%(obsid)
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    subdir = "pops"
    popsPath = os.path.join(outBase, subdir)
    if not os.path.exists(popsPath):
        os.makedirs(popsPath)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s'%(popsPath))
    files = []
    for objtype in objtypes:
        writeJobEvent(je, 'Object:%s'%(objtype), 'Doing %s out of: %s'%(objtype, ",".join(objtypes)))
        filename = "trim_%i_%s.dat"%(obsid,objtype)
        outfile = os.path.join(popsPath,filename)
        myqdb = queryDB.queryDB(chunksize=csize,objtype=objtype)
        # This grabs the metadata and grabs the catalog in a circle around central pointing.
        #  WHY COMBINED? DO WE NEED TO SPLIT TO PROVIDE ADDITIONAL METADATA EASILY?
        ic = myqdb.getInstanceCatalogById(obsid, radiusdeg=radius)        
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(je, 'GetChunk', 'Got chunk #%i of length %i'%(cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            # Calculate focal plane mm/mm and chip x/y for each object.
            ic.makeXyCoords()
            writeJobEvent(je, 'MakeXY', 'Made xy coords for chunk #%i'%(cnum))
            # Calculate calib counts for each object.
            ic.calcCalibCounts()
            writeJobEvent(je, 'calcCalibCounts', 'Generated throughput curve and calculated counts for chunk #%i' %(cnum))
            if cnum == 0:
                if compress:
                    files.append(os.path.join(subdir,filename)+".gz")
                else:
                    files.append(os.path.join(subdir,filename))
                # Okay, need to understand this a little more ... think it's mostly formatting/writing metadata.
                # NEED TO WRITE A NEW VERSION?
                mUtils.trimGeneration.derivedTrimMetadata(ic)
                if meta is None:
                    meta = deepcopy(ic.metadata)
                else:
                    meta.mergeMetadata(ic.metadata)
            if objtype in varobj:
                writeJobEvent(je, 'DoVar', 'Applying variability to chunk #%i of type %s'%(cnum, objtype))
                ic.applyVariability()
            # WILL NEED SOMETHING HERE FOR CALIB CAT
            ic.validateData('TRIM')
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.writeCatalogData(outfile, "TRIM", newfile = True, compress=compress)
                writeJobEvent(je, 'WriteChunk', 'Wrote first chunk of length %i'%(numRec))
            else:
                ic.writeCatalogData(outfile, "TRIM", newfile = False, compress=compress)
                writeJobEvent(je, 'WriteChunk', 'Wrote chunk #%i of length %i'%(cnum,numRec))
            if numRec == csize:
                ic = myqdb.getNextChunk()
            else:
                ic = None
            cnum += 1
        writeJobEvent(je, 'Finished Object:%s'%(objtype), 'Finished object %s'%(objtype))
    meta.validateMetadata(cattype, opsimid)
    metaOutfile = os.path.join(outBase,"metadata_%i.dat"%(obsid))
    meta.writeMetadata(metaOutfile, cattype, opsimid, newfile=True, filelist=files, compress=False)
    #files.append(os.path.join("obsid%i"%(obsid),"metadata_%i.dat"%(obsid)))
    writeJobEvent(je, 'WriteMetadata', 'Wrote metadata to %s'%(metaOutfile))
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')
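
Under the same assumptions, a hypothetical runCalib call for a single pointing:

# Hypothetical invocation of runCalib; the output path is a placeholder.
runCalib(csize=100000, obsid=85748128, radius=2.1,
         outdir='/tmp/calib_out', compress=True)
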
Example #5
#!/usr/bin/env python
import copy
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox

if __name__ == "__main__":
    csize = 10
    objects = ("GALAXY", )
    myqdb = queryDB.queryDB(chunksize=csize, objtype="GALAXY")
    ic = myqdb.getInstanceCatalogById(85748128)
    print ic.metadata.parameters
    curMD = copy.deepcopy(ic.metadata)
    curMD.validateMetadata('TRIM')
#!/usr/bin/env python
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox

if __name__ == "__main__":
    csize = 10
    objects = ("GALAXY",)
    myqdb = queryDB.queryDB(chunksize=csize, objtype="GALAXY")
    ic = myqdb.getInstanceCatalogById(85748128)
    print ic.metadata.parameters
Example #7
#!/usr/bin/env python
import numpy, math, copy, cPickle, os, sys
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox
import lsst.sims.catalogs.measures.utils as mUtils

csize = 10
myqdb = queryDB.queryDB(chunksize=csize, objtype=sys.argv[1])
ic = myqdb.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.calculateUnrefractedAltAz()
print ic.metadata.parameters
curMD = copy.deepcopy(ic.metadata)
curMD.writeMetadata('testMetadata.dat', 'TRIM', myqdb.opsim, newfile=True)
cPickle.dump(ic, open('test.pkl', 'w'))
#ic.makeHelio()
ic.makeTrimCoords()
ic.metadata.validateMetadata('TRIM', myqdb.opsim)
print ic.dataArray
ic.validateData('TRIM')
ic.writeCatalogData('iCTest.txt', 'TRIM')
ic.metadata.writeMetadata('iCTest.meta', 'TRIM', myqdb.opsim, newfile=True)
Example #8
#!/usr/bin/env python
import copy
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox

if __name__ == "__main__":
    csize = 10
    myqdb = queryDB.queryDB(chunksize=csize, objtype="SSM")
    ic = myqdb.getInstanceCatalogById(85748128)
    print ic.metadata.parameters
    curMD = copy.deepcopy(ic.metadata)
    curMD.validateRequiredMetadata('TRIM', myqdb.opsim)
Example #9
    def doOneCatalogType(self, catalogType, queryTypes, obsHistID):
        #nFN = self.getNextGoodFileNum()
        fullTimeStart = time.time()
        self.executionDBManager = jobDB.JobState()
        t0 = self.executionDBManager.getJobId()

        nFN = '%s_%s' % (t0.getOwner(), t0.getId())
        print 'Using job ID: %s' % nFN
        print 'queryTypes:', queryTypes
        jobNum = 0
        jobTypes = []; jobNums = []; jobPickleFiles = []; useTypes = []
        allOutputFiles = []; curMD = None
        self.metaDataManager.reset()
        os.system('free -m')
        for objectType in queryTypes:
            if objectType not in useTypes: useTypes.append(objectType)
            print 'Getting first %s instance catalog of size %i...' % (
                objectType, self.chunkSize)
            t0 = time.time()
            myQDB = queryDB.queryDB(
                chunksize=self.chunkSize, objtype=objectType)
            print '   ...setting up QDB took %i sec.' % (time.time() - t0)
            t0 = time.time()
            instanceCat = myQDB.getInstanceCatalogById(obsHistID)
            print '   ...and getting catalog took %i sec.' % (time.time() - t0)

            numCats = 0
            if instanceCat is not None:
                # This code adds some needed fields to the metadata
                mUtils.trimGeneration.derivedTrimMetadata(instanceCat)
                os.system('free -m')
                # Deep copy so we can store this after instanceCat disappears
                if curMD is None:
                    curMD = copy.deepcopy(instanceCat.metadata)
                else:
                    curMD.mergeMetadata(instanceCat.metadata)

            while instanceCat is not None:
                t0 = self.WorkDir + 'catData%s_%i.ja' % (nFN, jobNum)
                t1 = self.WorkDir + 'catData%s_%i.p' % (nFN, jobNum)
                print 'Now pickling query type: %s' % objectType
                # Store job data files in instance
                time0 = time.time()
                instanceCat.jobAllocatorDataFile = t0
                allOutputFiles.append(t0) # Order is important
                instanceCat.jobAllocatorCatalogType = catalogType
                instanceCat.jobAllocatorObjectType = objectType
                cPickle.dump(instanceCat, open(t1, 'w'))
                print '   ...pickling took %i sec.' % (time.time() - time0)
                jobTypes.append(catalogType)
                jobNums.append(jobNum)
                jobPickleFiles.append(t1)
                jobNum += 1
                if numCats > 0:
                    curMD.mergeMetadata(instanceCat.metadata)

                # *** RRG:  Free up memory somehow here for instanceCat...
                del(instanceCat); instanceCat = None
                os.system('free -m')
                if self.maxCats >= 0 and (numCats + 1) >= self.maxCats:
                    instanceCat = None
                else:
                    print 'Querying DB for next chunk.'
                    t0 = time.time()
                    instanceCat = myQDB.getNextChunk()
                    print '   ...took %i sec.' % (time.time() - t0)
                    if instanceCat is not None:
                        # This code adds some needed fields to the metadata
                        mUtils.trimGeneration.derivedTrimMetadata(instanceCat)
                    os.system('free -m')
                    numCats += 1

        # RRG:  For now this must be disabled
        #curMD.validateMetadata(catalogType, myQDB.opsim)
        mFName = self.WorkDir + 'metaData%s_%s.ja' % (nFN, catalogType)
        curMD.writeMetadata(mFName, catalogType, myQDB.opsim, newfile=True)

        # Finished with queryDB; clean up nicely.
        myQDB.closeSession()
        
        # For debug mode, don't start the clients
        if self.QueryOnly:
            print 'Full time for this file: %i sec' % (time.time()-fullTimeStart)
            print 'DEBUG:  Finished, no client processes started.'
            return

        # Now fire off the jobs
        for i in range(len(jobNums)):
            jobId = '%s_%i' % (nFN, jobNums[i])
            self.executionDBManager.updateState(jobId, 'JAAdded')
            print 'Added job to execution DB: %s' % jobId
            #t0 = '/astro/apps/pkg/python64/bin/python jobAllocatorRun.py %i %s %s&' % (nFN, jobId, jobPickleFiles[i])
            #t0 = 'qsub ./runOneAthena.csh %i %s %s&' % (nFN, jobId, jobPickleFiles[i])
            #t0 = 'ssh minerva0 "(cd $PBS_O_WORKDIR; qsub ./runOneAthena.csh %i %s %s)"' % (nFN, jobId, jobPickleFiles[i])
            cwd0 = os.getcwd()
            f0 = open('tmpJA%s.csh' % jobId, 'w')
            f0.write('#!/bin/csh\n'
                     '#PBS -N jA%s\n'
                     '#PBS -l walltime=1:00:00\n'
                     '#PBS -e jA%s.err\n'
                     '#PBS -o jA%s.out\n'
                     'cd %s\n'
                     'source setupAthena.csh\n'
                     'python jobAllocatorRun.py %s %s %s\n'
                     'echo Finished.'
                     % (jobId, jobId, jobId, cwd0, nFN, jobId, jobPickleFiles[i]))
            f0.close()
            t0 = 'ssh minerva0 "(cd %s; /opt/torque/bin/qsub tmpJA%s.csh)"' % (cwd0, jobId)
            print t0
            os.system(t0)

        # Check that everything started within a certain time limit
        # On minerva, jobs may be queued indefinitely, so this won't work
        for i in range(len(jobNums)):
            jobId = '%s_%i' % (nFN, jobNums[i])
            tryNum = 0
            t0 = self.executionDBManager.queryState(jobId)
            while t0 != 'JAFinished':
                print 'Try %i: JA sees state for %s: %s' % (tryNum, jobId, t0)
                time.sleep(10)
                # Give it up to a day (the poll sleeps 10 s per try)
                if tryNum > 6 * 60 * 24:
                    raise RuntimeError('*** Job not started: %s' % jobId)
                tryNum += 1
                t0 = self.executionDBManager.queryState(jobId)
            print 'Finished (Try %i):  JA sees state for %s: %s' % (tryNum, jobId, t0)

        # Finally, merge the output trim file
        trimFile = self.WorkDir + 'trim%s_%s.ja' % (nFN, catalogType)
        t0 = 'cat %s > %s' % (mFName, trimFile)
        print t0
        os.system(t0)
        for f in allOutputFiles:
            t0 = 'cat %s >> %s' % (f, trimFile)
            print t0
            os.system(t0)
        print 'Full time for this file: %i sec' % (time.time()-fullTimeStart)
        print 'Finished catting trim file: ', trimFile
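
Stripped of the pickling, metadata bookkeeping, and PBS plumbing, the loop at the heart of doOneCatalogType reduces to the pattern below; treat it as a sketch of the control flow, with process() standing in for whatever is done per chunk.

# Skeleton of the chunked-query pattern used above (sketch only).
myQDB = queryDB.queryDB(chunksize=chunkSize, objtype=objectType)
instanceCat = myQDB.getInstanceCatalogById(obsHistID)
while instanceCat is not None:
    mUtils.trimGeneration.derivedTrimMetadata(instanceCat)  # add derived metadata fields
    process(instanceCat)  # hypothetical per-chunk hook
    instanceCat = myQDB.getNextChunk()  # returns None once the query is exhausted
myQDB.closeSession()
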
def runTrim(csize, obsid, radius=2.1, outdir='.', repodir=None, je=None, compress=True, cleanup=False, dodither=False,
        objtypes = ['SSM', 'MSSTARS','WDSTARS','BHBSTARS','RRLYSTARS',\
        'GLENS','IMAGE','EBSTARS','CEPHEIDSTARS','EASTEREGGS','GALAXY_BULGE','GALAXY_DISK','AGN'],
        varobj = ['MSSTARS', 'RRLYSTARS', 'AGN', 'IMAGE', 'WDSTARS', 'EBSTARS', 'CEPHEIDSTARS']):
    if repodir is None:
        repodir = outdir
    if dodither:
        opsimname = 'DITHEREDOPSIM361'
    else:
        opsimname = 'OPSIM361'
    meta = None
    opsimid = None
    files = []
    writeJobEvent(je, 'start')
    cattype = "TRIM"
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "obsid%i"%(obsid)
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    subdir = "pops"
    popsPath = os.path.join(outBase, subdir)
    if not os.path.exists(popsPath):
        os.makedirs(popsPath)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s'%(popsPath))
    files = []

    for objtype in objtypes:
        print "Doing object type: %s"%(objtype)
        writeJobEvent(je, 'Object:%s'%(objtype), 'Doing %s out of: %s'%(objtype, ",".join(objtypes)))
        filename = "trim_%i_%s.dat"%(obsid,objtype)
        outfile = os.path.join(popsPath,filename)
        myqdb = queryDB.queryDB(chunksize=csize,objtype=objtype,dithered=dodither)
        ic = myqdb.getInstanceCatalogById(obsid, radiusdeg=radius, opsim=opsimname)        
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(je, 'GetChunk', 'Got chunk #%i of length %i'%(cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            ic.makeTrimCoords()
            writeJobEvent(je, 'MakeTrim', 'Made trim coords for chunk #%i'%(cnum))
            if cnum == 0:
                if compress:
                    files.append(os.path.join(subdir,filename)+".gz")
                else:
                    files.append(os.path.join(subdir,filename))
                mUtils.trimGeneration.derivedTrimMetadata(ic)
                if meta is None:
                    meta = deepcopy(ic.metadata)
                else:
                    meta.mergeMetadata(ic.metadata)
            if objtype in varobj:
                writeJobEvent(je, 'DoVar', 'Applying variability to chunk #%i of type %s'%(cnum, objtype))
                ic.applyVariability()
            ic.validateData('TRIM')
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.writeCatalogData(outfile, "TRIM", newfile = True, compress=compress)
                writeJobEvent(je, 'WriteChunk', 'Wrote first chunk of length %i'%(numRec))
            else:
                ic.writeCatalogData(outfile, "TRIM", newfile = False, compress=compress)
                writeJobEvent(je, 'WriteChunk', 'Wrote chunk #%i of length %i'%(cnum,numRec))
            if numRec == csize:
                ic = myqdb.getNextChunk()
            else:
                ic = None
            cnum += 1
        writeJobEvent(je, 'Finished Object:%s'%(objtype), 'Finished object %s'%(objtype))
    meta.validateMetadata(cattype, opsimid)
    metaOutfile = os.path.join(outBase,"metadata_%i.dat"%(obsid))
    meta.writeMetadata(metaOutfile, cattype, opsimid, newfile=True, filelist=files, compress=False)
    #files.append(os.path.join("obsid%i"%(obsid),"metadata_%i.dat"%(obsid)))
    writeJobEvent(je, 'WriteMetadata', 'Wrote metadata to %s'%(metaOutfile))
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')
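
A hypothetical runTrim call restricted to two object types for a quick test; the output path is a placeholder.

# Hypothetical invocation of runTrim on a reduced set of object types.
runTrim(csize=100000, obsid=85748128, radius=2.1, outdir='/tmp/trim_out',
        dodither=False, objtypes=['MSSTARS', 'GALAXY_BULGE'], varobj=['MSSTARS'])
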
#!/usr/bin/env python
import sys
import numpy
from numpy import random as rand
import math
import copy
import cPickle
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox
import lsst.sims.catalogs.measures.utils as mUtils

csize = 10
myqdb = queryDB.queryDB(chunksize=csize, objtype="ALLSTARS")
ic = myqdb.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.calculateUnrefractedAltAz()
print ic.metadata.parameters
curMD = copy.deepcopy(ic.metadata)
curMD.writeMetadata('testMetadata.dat', 'TRIM', myqdb.opsim, newfile=True)
cPickle.dump(ic, open('test.pkl', 'w'))
#ic.makeHelio()
ic.makeTrimCoords()
ic.metadata.validateMetadata('TRIM', myqdb.opsim)
print ic.dataArray
ic.validateData('TRIM')
ic.writeCatalogData('iCTest.txt', 'TRIM')
ic.metadata.writeMetadata('iCTest.meta','TRIM', myqdb.opsim, newfile = True)

def generateRaDec(n):
    # Uniform sampling on the sphere: RA uniform in [0, 360), Dec via arcsin.
    RA = 360.*rand.random(n)
    Dec = numpy.arcsin(2.*rand.random(n)-1.)*180./numpy.pi
    return RA, Dec

if __name__ == "__main__":
    csize = 100000
    nsamp = 10
    magcut = 21.5
    boxsizedeg = 0.2
    halfboxsizedeg = boxsizedeg / 2.
    sqarea = (math.sin(halfboxsizedeg * math.pi / 180.) - math.sin(
        -halfboxsizedeg * math.pi / 180.)) * (boxsizedeg * math.pi / 180.)
    radius = math.acos(1 - (sqarea / (2. * math.pi))) * 180. / math.pi
    ra, dec = generateRaDec(nsamp)
    fhout = open(sys.argv[1], "w")
    for i in xrange(nsamp):
        nums = {'u': 0, 'g': 0, 'r': 0, 'i': 0, 'z': 0, 'y': 0}
        expmjd = 0.
        myqdb = queryDB.queryDB(chunksize=csize,
                                objtype='CALIBSTARS',
                                filetypes=('CALIB', ))
        ic = myqdb.getInstanceCatalogByCirc(ra[i],
                                            dec[i],
                                            radius,
                                            expmjd=expmjd)
        while ic is not None:
            for f in nums.keys():
                nums[f] += len(numpy.where(ic.dataArray[f] < magcut)[0])
            ic = myqdb.getNextChunk()
        fhout.write(",".join([str(x) for x in\
            (ra[i],dec[i],sqarea,nums['u'],nums['g'],nums['r'],nums['i'],nums['z'],nums['y'])])+"\n")
        fhout.flush()
    fhout.close()
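
The radius computation above converts the 0.2-degree square footprint into a circle of equal solid angle: the spherical-cap area 2*pi*(1 - cos(r)) is set equal to the box's area. A standalone check of the arithmetic:

# Sanity check of the equal-area radius (pure arithmetic, no catalog access).
import math
boxsizedeg = 0.2
h = boxsizedeg / 2.
sqarea = (math.sin(h*math.pi/180.) - math.sin(-h*math.pi/180.)) * (boxsizedeg*math.pi/180.)
radius = math.acos(1. - sqarea/(2.*math.pi)) * 180./math.pi
print radius  # ~0.1128 deg: pi*r^2 (r in radians) reproduces sqarea
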
Example #13
def runDia(csize,
           obsidList,
           radius=2.1,
           outdir='.',
           repodir=None,
           je=None,
           compress=True,
           cleanup=False):
    if repodir is None:
        repodir = outdir
    meta = None
    opsimid = None
    writeJobEvent(je, 'start')
    cattype = "DIASOURCE"
    objtype = 'SSM'
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "diaBatchStarting_%i" % (obsidList[0])
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    if not os.path.exists(outBase):
        os.makedirs(outBase)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s' % (outBase))
    nid = 0
    for obsid in obsidList:
        writeJobEvent(je, 'Obshistid:%s' % (obsid),
                      'Doing %i out of %i total' % (nid, len(obsidList)))
        filename = "dia_%i.dat" % (obsid)
        outfile = os.path.join(outBase, filename)
        myqdb = queryDB.queryDB(chunksize=csize,
                                objtype=objtype,
                                filetypes=(cattype, ))
        ic = myqdb.getInstanceCatalogById(obsid, radiusdeg=radius)
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(
                je, 'GetChunk', 'Got chunk #%i of length %i' %
                (cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.metadata.validateMetadata(cattype, opsimid)
                ic.metadata.writeMetadata(outfile,
                                          cattype,
                                          opsimid,
                                          newfile=True,
                                          filelist=None,
                                          compress=compress)
                writeJobEvent(je, 'WriteMetadata',
                              'Wrote metadata to %s' % (outfile))
            ic.validateData(cattype)
            ic.writeCatalogData(outfile,
                                cattype,
                                newfile=False,
                                compress=compress)
            writeJobEvent(je, 'WriteChunk',
                          'Wrote chunk #%i of length %i' % (cnum, numRec))
            ic = myqdb.getNextChunk()
            cnum += 1
        myqdb.closeSession()
        nid += 1
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')
Example #14
def runTrim(csize, obsid, radius=2.1, outdir='.', repodir=None, je=None, compress=True, cleanup=False, dodither=False,
        objtypes = ['SSM', 'MSSTARS','WDSTARS','BHBSTARS','RRLYSTARS',\
        'GLENS','IMAGE','EBSTARS','CEPHEIDSTARS','EASTEREGGS','GALAXY_BULGE','GALAXY_DISK','AGN'],
        varobj = ['MSSTARS', 'RRLYSTARS', 'AGN', 'IMAGE', 'WDSTARS', 'EBSTARS', 'CEPHEIDSTARS']):
    if repodir is None:
        repodir = outdir
    if dodither:
        opsimname = 'DITHEREDOPSIM361'
    else:
        opsimname = 'OPSIM361'
    meta = None
    opsimid = None
    files = []
    writeJobEvent(je, 'start')
    cattype = "TRIM"
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "obsid%i" % (obsid)
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    subdir = "pops"
    popsPath = os.path.join(outBase, subdir)
    if not os.path.exists(popsPath):
        os.makedirs(popsPath)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s' % (popsPath))
    files = []

    for objtype in objtypes:
        print "Doing object type: %s" % (objtype)
        writeJobEvent(je, 'Object:%s' % (objtype),
                      'Doing %s out of: %s' % (objtype, ",".join(objtypes)))
        filename = "trim_%i_%s.dat" % (obsid, objtype)
        outfile = os.path.join(popsPath, filename)
        myqdb = queryDB.queryDB(chunksize=csize,
                                objtype=objtype,
                                dithered=dodither)
        ic = myqdb.getInstanceCatalogById(obsid,
                                          radiusdeg=radius,
                                          opsim=opsimname)
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(
                je, 'GetChunk', 'Got chunk #%i of length %i' %
                (cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            ic.makeTrimCoords()
            writeJobEvent(je, 'MakeTrim',
                          'Made trim coords for chunk #%i' % (cnum))
            if cnum == 0:
                if compress:
                    files.append(os.path.join(subdir, filename) + ".gz")
                else:
                    files.append(os.path.join(subdir, filename))
                mUtils.trimGeneration.derivedTrimMetadata(ic)
                if meta is None:
                    meta = deepcopy(ic.metadata)
                else:
                    meta.mergeMetadata(ic.metadata)
            if objtype in varobj:
                writeJobEvent(
                    je, 'DoVar',
                    'Applying variability to chunk #%i of type %s' %
                    (cnum, objtype))
                ic.applyVariability()
            ic.validateData('TRIM')
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.writeCatalogData(outfile,
                                    "TRIM",
                                    newfile=True,
                                    compress=compress)
                writeJobEvent(je, 'WriteChunk',
                              'Wrote first chunk of length %i' % (numRec))
            else:
                ic.writeCatalogData(outfile,
                                    "TRIM",
                                    newfile=False,
                                    compress=compress)
                writeJobEvent(je, 'WriteChunk',
                              'Wrote chunk #%i of length %i' % (cnum, numRec))
            if numRec == csize:
                ic = myqdb.getNextChunk()
            else:
                ic = None
            cnum += 1
        writeJobEvent(je, 'Finished Object:%s' % (objtype),
                      'Finished object %s' % (objtype))
    meta.validateMetadata(cattype, opsimid)
    metaOutfile = os.path.join(outBase, "metadata_%i.dat" % (obsid))
    meta.writeMetadata(metaOutfile,
                       cattype,
                       opsimid,
                       newfile=True,
                       filelist=files,
                       compress=False)
    #files.append(os.path.join("obsid%i"%(obsid),"metadata_%i.dat"%(obsid)))
    writeJobEvent(je, 'WriteMetadata', 'Wrote metadata to %s' % (metaOutfile))
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')
Example #15
def findGalaxies(ra_center, dec_center, radius, rMagMax, baMin, baMax):

   # CONSTANTS
   rad2deg = 180./math.pi
   csize = 100000
   obj = "ASSEMBLEDGALAXY"
   expmjd = 0.
   gal_count = 0
   wavelen_step = 0.25
   Sixflts = ['float','float','float','float','float','float']


   # BANDPASSES 
   # Instantiate bandpasses and read into lsstbp.
   bpdir = os.getenv("LSST_THROUGHPUTS_BASELINE")
   filterlist = ('u', 'g', 'r', 'i', 'z', 'y')
   lsstbp = {}
   for f in filterlist:
      lsstbp[f] = Bandpass(wavelen_min=300,wavelen_max=1200, wavelen_step=0.1)
      lsstbp[f].readThroughput(os.path.join(bpdir, 'total_' + f + '.dat'), wavelen_step=wavelen_step)

   # SEDS
   # Read in all of the galaxy seds in the root directory:
   galdir = "/astro/net/lsst1/shared/data/galaxySED/"
   gals = {}
   gallist = os.listdir(galdir)
   for i in range(len(gallist)):
      if gallist[i].endswith('.gz'):
         gallist[i] = gallist[i][:-3]
   for gal in gallist:
      gals[gal] = Sed()
      gals[gal].readSED_flambda(os.path.join(galdir, gal))

   # Check on resampling - want all galaxy seds to have the same wavelength range.
   # (although our LSST ranges are 300 - 1200 nm, the SED's are ~27 to 2290 nm).
   if ((gals[gallist[0]].wavelen.min() < 30) & (gals[gallist[0]].wavelen.max() > 2000)):
       # If true, then gals[gallist[0]] is okay to use as a template -- this ought to be true.
       wavelen_match = gals[gallist[0]].wavelen
   else:
       print "Had to use simple wavelength array for matching"
       wavelen_match = numpy.arange(30, 2200, 0.1, dtype='float')
   for gal in gallist:
       if gals[gal].needResample(wavelen_match = wavelen_match):
           gals[gal].resampleSED(wavelen_match = wavelen_match)

   # Create the galactic a, b values from CCM 89 for the source galaxy and ours.
   # adjust for redshift, add Milky Way dust (recalculate a and b),
   # normalize fluxes and calculate magnitudes:
   # First: calculate source galaxy a/b on wavelength range required for 
   # source galaxy (internal)from CCM 89.
   a_int, b_int = gals[gallist[0]].setupCCMab()

   # Second: calculate milky way a/b on wavelength range required for calculating 
   # magnitudes - i.e. 300 to 1200 nm.
   # Set up a Sed object that is the same for all galaxies, all chunks.
   # Start with a flat SED with F_AB = 3631 Jy 
   tmpgal = Sed()
   tmpgal.setFlatSED(wavelen_min=300, wavelen_max=1200, wavelen_step=wavelen_step)
   a_mw, b_mw = tmpgal.setupCCMab()  # so this is a/b on 300-1200 range. 

   # Set up phi, the wavelength-normalized system response for each filter, for each bandpass.
   # sb is the system response function (throughputs).  Also set up a bandpass list, for
   # manyMagCalc method and initiate mags w/dust.
   bplist = []
   for f in filterlist:
      lsstbp[f].sbTophi()
      bplist.append(lsstbp[f])
   phiarray, dlambda = tmpgal.setupPhiArray(bplist)

   objId = numpy.empty(0)
   ra = numpy.empty(0)
   dec = numpy.empty(0)
   diskFluxNorm = numpy.empty(0)
   bulgeFluxNorm = numpy.empty(0)
   diskSedFilename = numpy.empty(0)
   bulgeSedFilename = numpy.empty(0)
   a_disk = numpy.empty(0)
   b_disk = numpy.empty(0)
   a_bulge = numpy.empty(0)
   b_bulge = numpy.empty(0)
   pa_d = numpy.empty(0)
   rdshft = numpy.empty(0)
   Rmags = numpy.empty(0)

   #QUERY
   #myqdb = queryDB.queryDB(chunksize=csize,objtype=obj, filetypes=('REFERENCECATALOG',))
   myqdb = queryDB.queryDB(chunksize=csize,objtype=obj, filetypes=('TEST',))
   # Specify a circular field of view:
   ic = myqdb.getInstanceCatalogByCirc(ra_center, dec_center, radius, expmjd=expmjd)
   # Begin iteratively acquiring data
   gal_count = 0
   nchunks = 0
   while ic is not None:
      gal_loop = len(ic.dataArray['raJ2000'])   
      gal_count += gal_loop
      objId = numpy.concatenate((objId, ic.dataArray['objId']), axis=0)
      ra = numpy.concatenate((ra, ic.dataArray['raJ2000']*rad2deg), axis=0)
      dec = numpy.concatenate((dec, ic.dataArray['decJ2000']*rad2deg), axis=0)
      diskFluxNorm = numpy.concatenate((diskFluxNorm, ic.dataArray['diskFluxNorm']), axis=0)
      bulgeFluxNorm = numpy.concatenate((bulgeFluxNorm, ic.dataArray['bulgeFluxNorm']), axis=0)
      diskSedFilename = numpy.concatenate((diskSedFilename, ic.dataArray['diskSedFilename']), axis=0)
      bulgeSedFilename = numpy.concatenate((bulgeSedFilename, ic.dataArray['bulgeSedFilename']), axis=0)
      a_disk = numpy.concatenate((a_disk, ic.dataArray['semiMajorDisk']), axis=0)
      b_disk = numpy.concatenate((b_disk, ic.dataArray['semiMinorDisk']), axis=0)
      a_bulge = numpy.concatenate((a_bulge, ic.dataArray['semiMajorBulge']), axis=0)
      b_bulge = numpy.concatenate((b_bulge, ic.dataArray['semiMinorBulge']), axis=0)
      pa_d = numpy.concatenate((pa_d, ic.dataArray['positionAngleDisk']), axis=0)
      rdshft = numpy.concatenate((rdshft, ic.dataArray['redshift']), axis=0)
      # Get next chunk, if it exists.
      nchunks += 1
      ic = myqdb.getNextChunk()
   myqdb.closeSession()


   # Calculate galactic coordinates:
   gLon = []
   gLat = []
   for i in range(gal_count):
      gcoord = afwCoord.IcrsCoord(ra[i], dec[i]).toGalactic()
      gLon.append(gcoord.getL(afwCoord.DEGREES)*math.pi/180.)
      gLat.append(gcoord.getB(afwCoord.DEGREES)*math.pi/180.)
   ebv_mw = (EBV.calculateEbv(gLon, gLat, ebvMapNorth, ebvMapSouth, interp = True))
   del gLon
   del gLat
   print 'gLon, gLat calculated'

   # Now calculate magnitudes for each galaxy.  If you have a bulge, initiate its Sed
   # instance, multiply fnu by the bulge flux normalization and apply the bulge dust
   # model (currently zero).  Next initiate a Sed instance for the disk (if you have
   # one), multiply fnu by the disk flux normalization and apply the disk dust model.
   # If you have bulge and disk Sed's, add them together; if not you'll just use 
   # whichever one you have.  Correct for redshift, resample the Sed since now it's
   # shifted in wavelength, add the Milky Way dust and calculate magnitudes using the
   # manyMag method.  Uncorrected Magnitudes (no dust, reddening, redshifting added)
   # are calculated as well.

   uncmags = numpy.zeros(gal_count, dtype={'names':['u','g','r','i','z','y'], 'formats':Sixflts})
   option = numpy.zeros(len(ra)) + 5
   baRatio = numpy.zeros(len(ra))
   # pa_d is an array, so use numpy's vectorized trig rather than math.sin/math.cos.
   raEdge = ra - a_disk/3600.*numpy.sin(pa_d*math.pi/180.)
   decEdge = dec + a_disk/3600.*numpy.cos(pa_d*math.pi/180.)

   # Uncorrected Magnitudes (no dust, reddening, redshifting added)
   for i in range(gal_count):
      galdisk = diskSedFilename[i]
      galbulge = bulgeSedFilename[i]
       #raEdge = ra[i] - a_disk[i]/3600.*math.sin((pa_d[i])*math.pi/180.)
      #decEdge = dec[i] + a_disk[i]/3600.*math.cos((pa_d[i])*math.pi/180.)
      if (galbulge is not None) and (galdisk is not None):
         ba_disk = b_disk[i]/a_disk[i]
         ba_bulge = b_bulge[i]/a_bulge[i]
         # Store the per-galaxy flux-weighted axis ratio so it can be cut on later.
         baRatio[i] = (diskFluxNorm[i]*ba_disk + bulgeFluxNorm[i]
                       *ba_bulge)/(diskFluxNorm[i] + bulgeFluxNorm[i])
         if baMin <= baRatio[i] <= baMax:
            option[i] = 2
         else: continue
         tmpbulge = uncMagCalc(rdshft[i], gals[galbulge].wavelen, gals[galbulge].flambda, 
                               multiFlux=bulgeFluxNorm[i])
         tmpdisk = uncMagCalc(rdshft[i], gals[galdisk].wavelen, gals[galdisk].flambda,
                              multiFlux=diskFluxNorm[i])
         newgal = uncMagCalc(rdshft[i], tmpdisk.wavelen, (tmpdisk.flambda+tmpbulge.flambda), finish=1)
         tmpmags = newgal.manyMagCalc(phiarray, dlambda)
      elif galbulge is not None:
         # Bulge-only case: the axis ratio comes from the bulge components.
         baRatio[i] = b_bulge[i]/a_bulge[i]
         if baMin <= baRatio[i] <= baMax:
            option[i] = 0
         else: continue
         tmpbulge = uncMagCalc(rdshft[i], gals[galbulge].wavelen, gals[galbulge].flambda, 
                               multiFlux=bulgeFluxNorm[i], finish=1)
         tmpmags = tmpbulge.manyMagCalc(phiarray, dlambda)
      elif galdisk is not None:
         baRatio[i] = b_disk[i]/a_disk[i]
         if baMin <= baRatio[i] <= baMax:
            option[i] = 1
         else: continue
         tmpdisk = uncMagCalc(rdshft[i], gals[galdisk].wavelen, gals[galdisk].flambda,
                              multiFlux=diskFluxNorm[i], finish=1)
         tmpmags = tmpdisk.manyMagCalc(phiarray, dlambda)
      j = 0
      for f in filterlist:
         uncmags[f][i] = tmpmags[j]
         j += 1

   # Only drop arrays that are genuinely finished; uncmags, raEdge, and decEdge
   # are still needed for the selection below.
   del diskSedFilename, bulgeSedFilename, rdshft

   # r is the third entry of filterlist, so cut on the structured field directly.
   idx = numpy.where((uncmags['r'] <= rMagMax) & (option != 5))
   rmags = uncmags['r'][idx]
   objId = objId[idx]
   option = option[idx]
   ra = ra[idx]
   dec = dec[idx]
   baRatio = baRatio[idx]
   a_disk = a_disk[idx]
   b_disk = b_disk[idx]
   a_bulge = a_bulge[idx]
   b_bulge = b_bulge[idx]
   diskFluxNorm = diskFluxNorm[idx]
   bulgeFluxNorm = bulgeFluxNorm[idx]
   pa_d = pa_d[idx]
   raEdge = raEdge[idx]
   decEdge = decEdge[idx]
   #print '%11.7f %11.7f %11.7f %11.7f' % (ra[i], dec[i], tmpmags[2], baRatio)


   print '# chunks: ', nchunks
   print 'Total # of galaxies: %d' % (gal_count)
   print 'Number of qualifying Galaxies within a radius of %s deg. is %s' % (radius, len(rmags))
   #print 'len(ra), len(rmags), len(baRatio), len(option): ', len(ra), len(rmags), len(baRatio), len(option)
   return ra, dec, rmags, baRatio, option, objId, a_disk, b_disk, a_bulge, b_bulge, diskFluxNorm, bulgeFluxNorm, pa_d, raEdge, decEdge
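
A hypothetical findGalaxies call; the field center and cuts are placeholders, and the function additionally assumes the module-level SED, dust-map, and Bandpass machinery is importable.

# Hypothetical invocation; all argument values are placeholders.
results = findGalaxies(ra_center=0.0, dec_center=0.0, radius=0.1128,
                       rMagMax=24.0, baMin=0.2, baMax=0.5)
ra, dec, rmags, baRatio, option = results[:5]
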
#!/usr/bin/env python
import numpy, math, copy, cPickle, os, sys
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox
import lsst.sims.catalogs.measures.utils as mUtils

csize = 10
myqdb = queryDB.queryDB(chunksize=csize, objtype=sys.argv[1])
ic = myqdb.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.calculateUnrefractedAltAz()
print ic.metadata.parameters
curMD = copy.deepcopy(ic.metadata)
curMD.writeMetadata('testMetadata.dat', 'TRIM', myqdb.opsim, newfile=True)
cPickle.dump(ic, open('test.pkl', 'w'))
#ic.makeHelio()
ic.makeTrimCoords()
ic.metadata.validateMetadata('TRIM', myqdb.opsim)
print ic.dataArray
ic.validateData('TRIM')
ic.writeCatalogData('iCTest.txt', 'TRIM')
ic.metadata.writeMetadata('iCTest.meta','TRIM', myqdb.opsim, newfile = True)

Example #17
#!/usr/bin/env python
import numpy
import math
import copy
import cPickle
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox
import lsst.sims.catalogs.measures.utils as mUtils

csize = 10
myqdb = queryDB.queryDB(chunksize=csize, objtype="ALLSTARS")
ic = myqdb.getInstanceCatalogById(85748128)
print ic.metadata.parameters
curMD = copy.deepcopy(ic.metadata)
mUtils.trimGeneration.derivedTrimMetadata(ic)
print ic.metadata.parameters
cPickle.dump(ic, open('test.pkl', 'w'))
ic.makeHelio()
ic.makeTrimCoords()
ic.metadata.validateMetadata('TRIM', myqdb.opsim)
print ic.dataArray
ic.validateData('TRIM')
ic.writeCatalogData('iCTest.txt', 'TRIM')
ic.metadata.writeMetadata('iCTest.meta', 'TRIM', myqdb.opsim, newfile=True)
def generateRaDec(n):
  RA = 360.*rand.random(n)
  Dec = numpy.arcsin(2.*rand.random(n)-1.)*180./numpy.pi
  return RA, Dec

if __name__ == "__main__":
  csize = 100000
  nsamp = 10
  magcut = 21.5
  boxsizedeg = 0.2
  halfboxsizedeg = boxsizedeg/2.
  sqarea = (math.sin(halfboxsizedeg*math.pi/180.) -
          math.sin(-halfboxsizedeg*math.pi/180.))*(boxsizedeg*math.pi/180.)
  radius = math.acos(1-(sqarea/(2.*math.pi)))*180./math.pi
  ra,dec = generateRaDec(nsamp)
  fhout = open(sys.argv[1], "w")
  for i in xrange(nsamp):
    nums = {'u':0,'g':0,'r':0,'i':0,'z':0,'y':0}
    expmjd = 0.
    myqdb = queryDB.queryDB(chunksize=csize,objtype='CALIBSTARS',filetypes=('CALIB',))
    ic = myqdb.getInstanceCatalogByCirc(ra[i],dec[i],radius,expmjd=expmjd)
    while ic is not None:
      for f in nums.keys():
        nums[f] += len(numpy.where(ic.dataArray[f] < magcut)[0])
      ic = myqdb.getNextChunk()
    fhout.write(",".join([str(x) for x in\
        (ra[i],dec[i],sqarea,nums['u'],nums['g'],nums['r'],nums['i'],nums['z'],nums['y'])])+"\n")
    fhout.flush()
  fhout.close()
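
The rows written above are plain comma-separated values; a minimal sketch of reading them back, assuming the column order used by the writer:

# Hypothetical reader for the CSV produced above (column order taken from the writer).
import csv
with open('starcounts.csv') as fh:  # placeholder filename
    for row in csv.reader(fh):
        ra, dec, sqarea = [float(x) for x in row[:3]]
        nu, ng, nr, ni, nz, ny = [int(x) for x in row[3:]]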

Example #19
#!/usr/bin/env python
import numpy, math, copy, cPickle, os, sys
from lsst.sims.catalogs.generation.db import queryDB
from lsst.sims.catalogs.measures.astrometry import Bbox
import lsst.sims.catalogs.measures.utils as mUtils

csize = 10
myQDB = queryDB.queryDB(chunksize=csize, objtype='ALLSTARS')
ic = myQDB.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.makeTrimCoords()
print ic.metadata.parameters
curMD = copy.deepcopy(ic.metadata)

myQDB = queryDB.queryDB(chunksize=csize, objtype='GALAXY_DISK')
ic = myQDB.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.makeTrimCoords()
print ic.metadata.parameters
curMD.mergeMetadata(ic.metadata)

myQDB = queryDB.queryDB(chunksize=csize, objtype='GALAXY_BULGE')
ic = myQDB.getInstanceCatalogById(85748128)
print ic.metadata.parameters
mUtils.trimGeneration.derivedTrimMetadata(ic)
ic.makeTrimCoords()
print ic.metadata.parameters
curMD.mergeMetadata(ic.metadata)
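
The three blocks above repeat one pattern per object type; under the same imports, the loop below is an equivalent sketch.

# Equivalent sketch of the three blocks above, written as a loop.
curMD = None
for objtype in ('ALLSTARS', 'GALAXY_DISK', 'GALAXY_BULGE'):
    myQDB = queryDB.queryDB(chunksize=csize, objtype=objtype)
    ic = myQDB.getInstanceCatalogById(85748128)
    mUtils.trimGeneration.derivedTrimMetadata(ic)
    ic.makeTrimCoords()
    if curMD is None:
        curMD = copy.deepcopy(ic.metadata)
    else:
        curMD.mergeMetadata(ic.metadata)
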
Example #20
def runCalib(csize,
             obsid,
             radius=2.1,
             outdir='.',
             repodir=None,
             je=None,
             compress=True,
             cleanup=False):
    if repodir is None:
        repodir = outdir
    meta = None
    opsimid = None
    files = []
    writeJobEvent(je, 'start')
    cattype = "CALIB"
    print "Warning -- only MSSTARS in calib cats currently"
    #objtypes = ['MSSTARS','WDSTARS','BHBSTARS','RRLYSTARS', \
    #            'GLENS','IMAGE','EBSTARS','CEPHEIDSTARS','EASTEREGGS','GALAXY_BULGE','GALAXY_DISK','AGN']
    #varobj = ['MSSTARS', 'RRLYSTARS', 'AGN', 'IMAGE', 'WDSTARS', 'EBSTARS', 'CEPHEIDSTARS']
    objtypes = [
        'MSSTARS',
    ]
    varobj = [
        'MSSTARS',
    ]
    warnings.simplefilter('ignore', category=exceptions.UserWarning)
    arcroot = "obsid%i" % (obsid)
    outBase = os.path.join(outdir, arcroot)
    if cleanup:
        cleanUpDirs(outBase, je)
    subdir = "pops"
    popsPath = os.path.join(outBase, subdir)
    if not os.path.exists(popsPath):
        os.makedirs(popsPath)
    writeJobEvent(je, 'MakeDirs', 'Made output directories %s' % (popsPath))
    files = []
    for objtype in objtypes:
        writeJobEvent(je, 'Object:%s' % (objtype),
                      'Doing %s out of: %s' % (objtype, ",".join(objtypes)))
        filename = "trim_%i_%s.dat" % (obsid, objtype)
        outfile = os.path.join(popsPath, filename)
        myqdb = queryDB.queryDB(chunksize=csize, objtype=objtype)
        # This grabs the metadata and grabs the catalog in a circle around central pointing.
        #  WHY COMBINED? DO WE NEED TO SPLIT TO PROVIDE ADDITIONAL METADATA EASILY?
        ic = myqdb.getInstanceCatalogById(obsid, radiusdeg=radius)
        if opsimid is None:
            opsimid = myqdb.opsim
        cnum = 0
        while ic is not None:
            writeJobEvent(
                je, 'GetChunk', 'Got chunk #%i of length %i' %
                (cnum, len(ic.dataArray[ic.dataArray.keys()[0]])))
            # Calculate focal plane mm/mm and chip x/y for each object.
            ic.makeXyCoords()
            writeJobEvent(je, 'MakeXY',
                          'Made xy coords for chunk #%i' % (cnum))
            # Calculate calib counts for each object.
            ic.calcCalibCounts()
            writeJobEvent(
                je, 'calcCalibCounts',
                'Generated throughput curve and calculated counts for chunk #%i'
                % (cnum))
            if cnum == 0:
                if compress:
                    files.append(os.path.join(subdir, filename) + ".gz")
                else:
                    files.append(os.path.join(subdir, filename))
                # Okay, need to understand this a little more ... think it's mostly formatting/writing metadata.
                # NEED TO WRITE A NEW VERSION?
                mUtils.trimGeneration.derivedTrimMetadata(ic)
                if meta is None:
                    meta = deepcopy(ic.metadata)
                else:
                    meta.mergeMetadata(ic.metadata)
            if objtype in varobj:
                writeJobEvent(
                    je, 'DoVar',
                    'Applying variability to chunk #%i of type %s' %
                    (cnum, objtype))
                ic.applyVariability()
            # WILL NEED SOMETHING HERE FOR CALIB CAT
            ic.validateData('TRIM')
            numRec = len(ic.dataArray[ic.dataArray.keys()[0]])
            if cnum == 0:
                ic.writeCatalogData(outfile,
                                    "TRIM",
                                    newfile=True,
                                    compress=compress)
                writeJobEvent(je, 'WriteChunk',
                              'Wrote first chunk of length %i' % (numRec))
            else:
                ic.writeCatalogData(outfile,
                                    "TRIM",
                                    newfile=False,
                                    compress=compress)
                writeJobEvent(je, 'WriteChunk',
                              'Wrote chunk #%i of length %i' % (cnum, numRec))
            if numRec == csize:
                ic = myqdb.getNextChunk()
            else:
                ic = None
            cnum += 1
        writeJobEvent(je, 'Finished Object:%s' % (objtype),
                      'Finished object %s' % (objtype))
    meta.validateMetadata(cattype, opsimid)
    metaOutfile = os.path.join(outBase, "metadata_%i.dat" % (obsid))
    meta.writeMetadata(metaOutfile,
                       cattype,
                       opsimid,
                       newfile=True,
                       filelist=files,
                       compress=False)
    #files.append(os.path.join("obsid%i"%(obsid),"metadata_%i.dat"%(obsid)))
    writeJobEvent(je, 'WriteMetadata', 'Wrote metadata to %s' % (metaOutfile))
    mvFiles(repodir, outBase, arcroot, je=je)
    if cleanup:
        cleanUpDirs(outBase, je)
    writeJobEvent(je, 'stop')