Example #1
0
 def testDbObj(self):
     """Smoke test: query the test star and galaxy DBObjects and dump the rows."""
     # Writing to /dev/null just confirms the queries execute and the
     # results serialize without error.
     for objid in ('testCatalogDBObjectTeststars', 'testCatalogDBObjectTestgals'):
         dbobj = CatalogDBObject.from_objid(objid)
         rows = dbobj.query_columns(obs_metadata=self.obsMd)
         tu.writeResult(rows, "/dev/null")
 def testDbObj(self):
     """Query stars and galaxies from the test database and write both result sets."""
     star_db = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
     gal_db = CatalogDBObject.from_objid('testCatalogDBObjectTestgals')
     # /dev/null is a sink; we only care that query + serialization succeed
     star_rows = star_db.query_columns(obs_metadata=self.obsMd)
     tu.writeResult(star_rows, "/dev/null")
     gal_rows = gal_db.query_columns(obs_metadata=self.obsMd)
     tu.writeResult(gal_rows, "/dev/null")
def examplePhoSimCatalogs():
    """
    This method outputs several phoSim input files consisting of different
    types of objects(stars, galaxy bulges, galaxy disks, and AGNs)

    The files created are
    catalog_test_msstars.dat
    catalog_test_galaxyDisk.dat
    catalog_test_galaxyBulge.dat
    catalog_test_galaxyAgn.dat

    (versions are also created that end in _chunked.dat; these should have
    the same contents)

    """
    # Build observation metadata from one specific OpSim pointing
    # (obshistid 88544919) with a circular field of view of radius 0.1.
    obsMD = bcm.OpSim3_61DBObject()
    obs_metadata = obsMD.getObservationMetaData(88544919, 0.1, makeCircBounds=True)
    # Map each test case to its database object, SQL constraint,
    # phoSim catalog type, and observation metadata.
    objectDict = {}
    objectDict['testStars'] = {'dbobj':CatalogDBObject.from_objid('msstars'),
                               'constraint':None,
                               'filetype':'phoSim_catalog_POINT',
                               'obsMetadata':obs_metadata}
    objectDict['testGalaxyBulge'] = {'dbobj':CatalogDBObject.from_objid('galaxyBulge'),
                               'constraint':"mass_bulge > 1. and sedname_bulge is not NULL",
                               'filetype':'phoSim_catalog_SERSIC2D',
                               'obsMetadata':obs_metadata}
    objectDict['testGalaxyDisk'] = {'dbobj':CatalogDBObject.from_objid('galaxyDisk'),
                               'constraint':"DiskLSSTg < 20. and sedname_disk is not NULL",
                               'filetype':'phoSim_catalog_SERSIC2D',
                               'obsMetadata':obs_metadata}
    objectDict['testGalaxyAgn'] = {'dbobj':CatalogDBObject.from_objid('galaxyAgn'),
                               'constraint':"sedname_agn is not NULL",
                               'filetype':'phoSim_catalog_ZPOINT',
                               'obsMetadata':obs_metadata}

    for objKey in objectDict.keys():
        dbobj = objectDict[objKey]['dbobj']
        t = dbobj.getCatalog(objectDict[objKey]['filetype'],
                             obs_metadata=objectDict[objKey]['obsMetadata'],
                             constraint=objectDict[objKey]['constraint'])

        # NOTE: Python 2 print statements below.
        print
        print "These are the required columns from the database:"
        print t.db_required_columns()
        print
        print "These are the columns that will be output to the file:"
        print t.column_outputs
        print

        # Write the catalog once in one shot, then again in chunks of 10;
        # the two files should end up with the same contents.
        filename = 'catalog_test_%s.dat'%(dbobj.objid)
        print "querying and writing catalog to %s:" % filename
        t.write_catalog(filename)
        filename = 'catalog_test_%s_chunked.dat'%(dbobj.objid)
        t.write_catalog(filename, chunk_size=10)
        print " - finished"
Example #4
0
    def testNonsenseSelectOnlySomeColumns_passConnection(self):
        """
        Query a subset of the available columns, constructing the DBObject
        with a database connection passed directly to the constructor.
        """
        base = CatalogDBObject.from_objid('Nonsense')
        nonsense = myNonsenseDB_noConnection(connection=base.connection)

        columns = ['NonsenseId', 'NonsenseRaJ2000', 'NonsenseMag']

        results = nonsense.query_columns(colnames=columns, constraint='ra < 45.',
                                         chunk_size=100)

        matched_ids = []
        n_returned = 0
        for chunk in results:
            for row in chunk:
                n_returned += 1
                self.assertLess(row[1], 45.0)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]
                matched_ids.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[2], 3)

        # the query must have returned something
        self.assertGreater(n_returned, 0)

        # every baseline row the query skipped must violate the ra constraint
        n_skipped = 0
        for entry in (xx for xx in self.baselineData if xx[0] not in matched_ids):
            self.assertGreater(entry[1], 45.0)
            n_skipped += 1
        self.assertGreater(n_skipped, 0)
        def testCanBeNull(self):
            """
            Test to make sure that we can still write all rows to catalogs,
            even those with null values in key columns
            """
            dbobj = CatalogDBObject.from_objid('cannotBeNull')
            cat = dbobj.getCatalog('canBeNull')
            fileName = 'canBeNullTestFile.txt'
            cat.write_catalog(fileName)
            # Read the catalog back; n4/n5 are fixed-width string columns
            # (`unicode` is Python 2 only).
            dtype = numpy.dtype([('id',int),('n1',numpy.float64),('n2',numpy.float64),('n3',numpy.float64),
                                 ('n4',(str,40)), ('n5',(unicode,40))])
            testData = numpy.genfromtxt(fileName,dtype=dtype,delimiter=',')

            for i in range(len(self.baselineOutput)):
                #make sure that all of the rows in self.baselineOutput are represented in
                #testData
                for (k,xx) in enumerate(self.baselineOutput[i]):
                    if k<4:
                        # numeric columns: NaN cannot be compared with ==,
                        # so branch on isnan
                        if not numpy.isnan(xx):
                            self.assertAlmostEqual(xx,testData[i][k], 3)
                        else:
                            self.assertTrue(numpy.isnan(testData[i][k]))
                    else:
                        # string columns: compare stripped of padding whitespace
                        msg = '%s is not %s' % (xx,testData[i][k])
                        self.assertEqual(xx.strip(),testData[i][k].strip(),msg=msg)

            # the loop variable survives the loop; assumes exactly 100 baseline rows
            self.assertEqual(i,99)
            self.assertEqual(len(testData), len(self.baselineOutput))

            if os.path.exists(fileName):
                os.unlink(fileName)
    def testNonsenseSelectOnlySomeColumns(self):
        """
        Test a query performed on only a subset of the available columns.
        """
        myNonsense = CatalogDBObject.from_objid('Nonsense')

        mycolumns = ['NonsenseId', 'NonsenseRaJ2000', 'NonsenseMag']

        query = myNonsense.query_columns(colnames=mycolumns,
                                         constraint='ra < 45.',
                                         chunk_size=100)

        goodPoints = []

        for chunk in query:
            for row in chunk:
                # assertLess reports the offending value on failure,
                # unlike assertTrue(row[1] < 45.0)
                self.assertLess(row[1], 45.0)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                # keep track of which rows the query returned
                goodPoints.append(row[0])

                self.assertAlmostEqual(
                    numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[2],
                                       3)

        for entry in [
                xx for xx in self.baselineData if xx[0] not in goodPoints
        ]:
            # rows the query skipped must violate the 'ra < 45.' constraint
            self.assertGreater(entry[1], 45.0)
def exampleAirmass(airmass,ra = 0.0, dec = 0.0, tol = 10.0, radiusDeg = 0.1,
            makeBoxBounds=False, makeCircBounds=True):
    """
    Write a reference star catalog based on an OpSim pointing with a given airmass.

    OpSim is searched for pointings whose airmass equals `airmass` and whose
    RA, Dec fall inside a box of half-width `tol` degrees centered on (ra, dec).
    Observation metadata built from the first matching pointing drives the
    catalog query.

    The catalog is output to stars_airmass_test.dat
    """
    opsim_db = bcm.OpSim3_61DBObject()

    # SQL constraint: the pointing's airmass must equal the requested value
    airmassConstraint = "airmass="+str(airmass)

    # restrict the OpSim search to a box on the sky around (ra, dec)
    skyBounds = SpatialBounds.getSpatialBounds('box', ra, dec, tol)

    query = opsim_db.executeConstrainedQuery(skyBounds, constraint=airmassConstraint)

    # turn the first matching pointing into observation metadata for the catalog
    obsMetaData = opsim_db.getObservationMetaData(query['Opsim_obshistid'][0], radiusDeg,
                                                  makeBoxBounds=makeBoxBounds,
                                                  makeCircBounds=makeCircBounds)

    # query the star database and write the reference catalog
    star_db = CatalogDBObject.from_objid('allstars')
    catalog = star_db.getCatalog('ref_catalog_star', obs_metadata=obsMetaData)
    catalog.write_catalog('stars_airmass_test.dat')
Example #8
0
    def testNonsenseArbitraryConstraints(self):
        """
        Query the Nonsense table with both a box bound and a user-specified
        magnitude constraint, then verify the returned and excluded rows.
        """
        nonsense = CatalogDBObject.from_objid('Nonsense')

        ra_min, ra_max = 50.0, 150.0
        dec_min, dec_max = -20.0, 30.0
        ra_center = 0.5 * (ra_min + ra_max)
        dec_center = 0.5 * (dec_min + dec_max)

        columns = ['NonsenseId', 'NonsenseRaJ2000', 'NonsenseDecJ2000', 'NonsenseMag']

        obs_md = ObservationMetaData(boundType='box', pointingRA=ra_center,
                                     pointingDec=dec_center,
                                     boundLength=numpy.array([0.5 * (ra_max - ra_min),
                                                              0.5 * (dec_max - dec_min)]),
                                     mjd=52000., bandpassName='r')

        box_query = nonsense.query_columns(colnames=columns, obs_metadata=obs_md,
                                           chunk_size=100, constraint='mag > 11.0')

        # work in radians from here on, to match the query output
        ra_min, ra_max = numpy.radians(ra_min), numpy.radians(ra_max)
        dec_min, dec_max = numpy.radians(dec_min), numpy.radians(dec_max)

        returned_ids = []
        n_returned = 0
        for chunk in box_query:
            for row in chunk:
                n_returned += 1

                # returned rows must satisfy the box bound and the mag cut
                self.assertLess(row[1], ra_max)
                self.assertGreater(row[1], ra_min)
                self.assertLess(row[2], dec_max)
                self.assertGreater(row[2], dec_min)
                self.assertGreater(row[3], 11.0)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                # remember which points the query returned
                returned_ids.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(numpy.radians(self.baselineData['dec'][dex]), row[2], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[3], 3)

        self.assertGreater(n_returned, 0)

        n_excluded = 0
        for entry in [xx for xx in self.baselineData if xx[0] not in returned_ids]:
            # rows the query skipped must violate either the box bound
            # or the magnitude cut-off
            violates = (entry[1] > ra_max or entry[1] < ra_min or
                        entry[2] > dec_max or entry[2] < dec_min or entry[3] < 11.0)
            self.assertTrue(violates)
            n_excluded += 1
        self.assertGreater(n_excluded, 0)
    def testNonsenseSelectOnlySomeColumns(self):
        """
        Test a query performed on only a subset of the available columns.
        """
        myNonsense = CatalogDBObject.from_objid('Nonsense')

        mycolumns = ['NonsenseId','NonsenseRaJ2000','NonsenseMag']

        query = myNonsense.query_columns(colnames=mycolumns, constraint = 'ra < 45.', chunk_size=100)

        goodPoints = []

        for chunk in query:
            for row in chunk:
                # assertLess gives an informative failure message,
                # unlike assertTrue(row[1] < 45.0)
                self.assertLess(row[1], 45.0)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                goodPoints.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]),row[1],3)
                self.assertAlmostEqual(self.baselineData['mag'][dex],row[2],3)


        for entry in [xx for xx in self.baselineData if xx[0] not in goodPoints]:
            # rows not returned must violate the 'ra < 45.' constraint
            self.assertGreater(entry[1], 45.0)
        def testCanBeNull(self):
            """
            Verify that catalogs still write every row, even rows whose key
            columns contain null values.
            """
            baselineOutput = createCannotBeNullTestDB()
            db = CatalogDBObject.from_objid('cannotBeNull')
            catalog = db.getCatalog('canBeNull')
            outName = 'canBeNullTestFile.txt'
            catalog.write_catalog(outName)
            row_dtype = numpy.dtype([('id', int), ('n1', numpy.float64),
                                     ('n2', numpy.float64), ('n3', numpy.float64)])
            written = numpy.genfromtxt(outName, dtype=row_dtype, delimiter=',')

            for i in range(len(baselineOutput)):
                if not numpy.isnan(baselineOutput['n2'][i]):
                    # NaNs cannot be compared with ==, so branch on isnan
                    for (k, xx) in enumerate(baselineOutput[i]):
                        if numpy.isnan(xx):
                            self.assertTrue(numpy.isnan(written[i][k]))
                        else:
                            self.assertAlmostEqual(xx, written[i][k], 3)

            # loop variable survives the loop; assumes exactly 100 baseline rows
            self.assertEqual(i, 99)

            if os.path.exists(outName):
                os.unlink(outName)
    def testCanBeNull(self):
        """
        Test to make sure that we can still write all rows to catalogs,
        even those with null values in key columns
        """
        dbobj = CatalogDBObject.from_objid('cannotBeNull')
        cat = dbobj.getCatalog('canBeNull')
        fileName = 'canBeNullTestFile.txt'
        cat.write_catalog(fileName)
        # Read the catalog back; n4/n5 are fixed-width string columns
        # (`unicode` is Python 2 only).
        dtype = numpy.dtype([('id', int), ('n1', numpy.float64),
                             ('n2', numpy.float64), ('n3', numpy.float64),
                             ('n4', (str, 40)), ('n5', (unicode, 40))])
        testData = numpy.genfromtxt(fileName, dtype=dtype, delimiter=',')

        for i in range(len(self.baselineOutput)):
            #make sure that all of the rows in self.baselineOutput are represented in
            #testData
            for (k, xx) in enumerate(self.baselineOutput[i]):
                if k < 4:
                    # numeric columns: NaN cannot be compared with ==
                    if not numpy.isnan(xx):
                        self.assertAlmostEqual(xx, testData[i][k], 3)
                    else:
                        self.assertTrue(numpy.isnan(testData[i][k]))
                else:
                    # string columns: compare stripped of padding whitespace
                    msg = '%s is not %s' % (xx, testData[i][k])
                    self.assertEqual(xx.strip(),
                                     testData[i][k].strip(),
                                     msg=msg)

        # loop variable survives the loop; assumes exactly 100 baseline rows
        self.assertEqual(i, 99)
        self.assertEqual(len(testData), len(self.baselineOutput))

        if os.path.exists(fileName):
            os.unlink(fileName)
    def testHybridVariability(self):
        """
        Test that we can generate a catalog which inherits from multiple variability mixins
        (in this case, TestVariability and VariabilityStars).  This is to make sure that
        the register_method and register_class decorators do not mangle inheritance of
        methods from mixins.
        """
        makeHybridTable()
        myDB = CatalogDBObject.from_objid('hybridTest')
        myCatalog = myDB.getCatalog('testVariabilityCatalog', obs_metadata=self.obs_metadata)
        myCatalog.write_catalog('hybridTestCatalog.dat', chunk_size=1000)

        if os.path.exists('hybridTestCatalog.dat'):
            os.unlink('hybridTestCatalog.dat')

        # make sure order of mixin inheritance does not matter
        myCatalog = myDB.getCatalog('otherVariabilityCatalog', obs_metadata=self.obs_metadata)
        myCatalog.write_catalog('hybridTestCatalog.dat', chunk_size=1000)

        if os.path.exists('hybridTestCatalog.dat'):
            os.unlink('hybridTestCatalog.dat')

        # make sure that, if a catalog does not contain a variability method,
        # an error is thrown; verify that it contains the correct error message
        myCatalog = myDB.getCatalog('stellarVariabilityCatalog', obs_metadata=self.obs_metadata)
        with self.assertRaises(RuntimeError) as context:
            myCatalog.write_catalog('hybridTestCatalog.dat')

        expectedMessage = "Your InstanceCatalog does not contain a variability method"
        expectedMessage += " corresponding to 'testVar'"
        # str(exception) is portable (BaseException.message was removed in
        # Python 3) and assertEqual reports both strings on mismatch,
        # unlike assertTrue(a == b)
        self.assertEqual(str(context.exception), expectedMessage)

        if os.path.exists('hybridTestCatalog.dat'):
            os.unlink('hybridTestCatalog.dat')
    def testNonsenseCircularConstraints(self):
        """
        Test that a query performed on a circle bound gets all of the objects (and only all
        of the objects) within that circle
        """

        myNonsense = CatalogDBObject.from_objid('Nonsense')

        radius = 20.0
        raCenter = 210.0
        decCenter = -60.0

        mycolumns = [
            'NonsenseId', 'NonsenseRaJ2000', 'NonsenseDecJ2000', 'NonsenseMag'
        ]

        circObsMd = ObservationMetaData(boundType='circle',
                                        unrefractedRA=raCenter,
                                        unrefractedDec=decCenter,
                                        boundLength=radius,
                                        mjd=52000.,
                                        bandpassName='r')

        circQuery = myNonsense.query_columns(colnames=mycolumns,
                                             obs_metadata=circObsMd,
                                             chunk_size=100)

        # work in radians from here on, to match the query output
        raCenter = numpy.radians(raCenter)
        decCenter = numpy.radians(decCenter)
        radius = numpy.radians(radius)

        goodPoints = []

        for chunk in circQuery:
            for row in chunk:
                distance = haversine(raCenter, decCenter, row[1], row[2])

                # assertLess reports the actual distance on failure,
                # unlike assertTrue(distance < radius)
                self.assertLess(distance, radius)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                #store a list of which objects fell within our circle bound
                goodPoints.append(row[0])

                self.assertAlmostEqual(
                    numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(
                    numpy.radians(self.baselineData['dec'][dex]), row[2], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[3],
                                       3)

        for entry in [
                xx for xx in self.baselineData if xx[0] not in goodPoints
        ]:
            #make sure that all of the points not returned by the query were, in fact, outside of
            #the circle bound
            distance = haversine(raCenter, decCenter, numpy.radians(entry[1]),
                                 numpy.radians(entry[2]))
            self.assertGreater(distance, radius)
Example #14
0
    def testNonsenseBoxConstraints(self):
        """
        Verify that a box-bounded query returns all of the points inside the
        box, and only those points.
        """
        nonsense = CatalogDBObject.from_objid('Nonsense')

        ra_min, ra_max = 50.0, 150.0
        dec_min, dec_max = -20.0, 30.0
        ra_center = 0.5 * (ra_min + ra_max)
        dec_center = 0.5 * (dec_min + dec_max)

        columns = ['NonsenseId', 'NonsenseRaJ2000', 'NonsenseDecJ2000', 'NonsenseMag']

        obs_md = ObservationMetaData(boundType='box', pointingDec=dec_center,
                                     pointingRA=ra_center,
                                     boundLength=numpy.array([0.5 * (ra_max - ra_min),
                                                              0.5 * (dec_max - dec_min)]),
                                     mjd=52000., bandpassName='r')

        box_query = nonsense.query_columns(obs_metadata=obs_md, chunk_size=100,
                                           colnames=columns)

        # work in radians from here on, to match the query output
        ra_min, ra_max = numpy.radians(ra_min), numpy.radians(ra_max)
        dec_min, dec_max = numpy.radians(dec_min), numpy.radians(dec_max)

        inside_ids = []
        n_inside = 0
        for chunk in box_query:
            for row in chunk:
                n_inside += 1
                self.assertLess(row[1], ra_max)
                self.assertGreater(row[1], ra_min)
                self.assertLess(row[2], dec_max)
                self.assertGreater(row[2], dec_min)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                # remember which points the query returned
                inside_ids.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(numpy.radians(self.baselineData['dec'][dex]), row[2], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[3], 3)

        self.assertGreater(n_inside, 0)

        n_outside = 0
        for entry in [xx for xx in self.baselineData if xx[0] not in inside_ids]:
            # points the query skipped must actually lie outside the box
            outside = (entry[1] > ra_max or entry[1] < ra_min or
                       entry[2] > dec_max or entry[2] < dec_min)
            self.assertTrue(outside)
            n_outside += 1
        self.assertGreater(n_outside, 0)
    def testEb(self):
        """Write a stellar variability catalog for the eclipsing-binary test table, then clean up."""
        makeEbTable()
        db = CatalogDBObject.from_objid('ebTest')
        catalog = db.getCatalog('stellarVariabilityCatalog', obs_metadata=self.obs_metadata)
        out_name = 'ebTestCatalog.dat'
        catalog.write_catalog(out_name, chunk_size=1000)

        if os.path.exists(out_name):
            os.unlink(out_name)
        def testCannotBeNull(self):
            """
            Test to make sure that the code for filtering out rows with null values
            in key columns works.
            """

            #each of these classes flags a different column with a different datatype as cannot_be_null
            availableCatalogs = [floatCannotBeNullCatalog, strCannotBeNullCatalog, unicodeCannotBeNullCatalog]
            dbobj = CatalogDBObject.from_objid('cannotBeNull')

            for catClass in availableCatalogs:
                cat = catClass(dbobj)
                fileName = 'cannotBeNullTestFile.txt'
                cat.write_catalog(fileName)
                # read the catalog back; n4/n5 are fixed-width string columns
                # (`unicode` is Python 2 only)
                dtype = numpy.dtype([('id',int),('n1',numpy.float64),('n2',numpy.float64),('n3',numpy.float64),
                                     ('n4',(str,40)), ('n5',(unicode,40))])
                testData = numpy.genfromtxt(fileName,dtype=dtype,delimiter=',')

                j = 0 #a counter to keep track of the rows read in from the catalog

                for i in range(len(self.baselineOutput)):

                    #self.baselineOutput contains all of the rows from the dbobj
                    #first, we must assess whether or not the row we are currently
                    #testing would, in fact, pass the cannot_be_null test
                    validLine = True
                    # string-typed null markers are spelled 'None' in the data;
                    # numeric nulls are NaN
                    if isinstance(self.baselineOutput[cat.cannot_be_null[0]][i],str) or \
                       isinstance(self.baselineOutput[cat.cannot_be_null[0]][i],unicode):

                        if self.baselineOutput[cat.cannot_be_null[0]][i].strip().lower() == 'none':
                            validLine = False
                    else:
                        if numpy.isnan(self.baselineOutput[cat.cannot_be_null[0]][i]):
                            validLine = False

                    if validLine:
                        #if the row in self.baslineOutput should be in the catalog, we now check
                        #that baseline and testData agree on column values (there are some gymnastics
                        #here because you cannot do an == on NaN's
                        for (k,xx) in enumerate(self.baselineOutput[i]):
                            if k<4:
                                if not numpy.isnan(xx):
                                    msg = 'k: %d -- %s %s -- %s' % (k,str(xx),str(testData[j][k]),cat.cannot_be_null)
                                    self.assertAlmostEqual(xx, testData[j][k],3, msg=msg)
                                else:
                                    self.assertTrue(numpy.isnan(testData[j][k]))
                            else:
                                msg = '%s (%s) is not %s (%s)' % (xx,type(xx),testData[j][k],type(testData[j][k]))
                                self.assertEqual(xx.strip(),testData[j][k].strip(), msg=msg)
                        j+=1

                self.assertEqual(i,99) #make sure that we tested all of the baseline rows
                self.assertEqual(j,len(testData)) #make sure that we tested all of the testData rows
                msg = '%d >= %d' % (j,i)
                self.assertTrue(j<i, msg=msg) #make sure that some rows did not make it into the catalog

            if os.path.exists(fileName):
                os.unlink(fileName)
Example #17
0
 def __init__(self, objid, phosimCatalogObject):
     """Cache the CatalogDBObject for *objid*, retrying on transient connection failures."""
     # The UW catsim database connection is intermittent; from_objid raises
     # RuntimeError when it drops, so keep retrying until it succeeds.
     db = None
     while db is None:
         try:
             db = CatalogDBObject.from_objid(objid)
         except RuntimeError:
             continue
     self.db_obj = db
     self.cat_obj = phosimCatalogObject
    def testAgn(self):
        """Write a galaxy variability catalog for the AGN test table, then clean up."""
        makeAgnTable()
        db = CatalogDBObject.from_objid('agnTest')
        catalog = db.getCatalog('galaxyVariabilityCatalog', obs_metadata=self.obs_metadata)
        out_name = 'agnTestCatalog.dat'
        catalog.write_catalog(out_name, chunk_size=1000)

        if os.path.exists(out_name):
            os.unlink(out_name)
 def __init__(self, objid, phosimCatalogObject):
     """Bind a catalog database object and a phoSim catalog class together."""
     # Retry indefinitely: the UW catsim db connection drops intermittently,
     # and from_objid raises RuntimeError when it does.
     while True:
         try:
             self.db_obj = CatalogDBObject.from_objid(objid)
         except RuntimeError:
             continue
         else:
             break
     self.cat_obj = phosimCatalogObject
 def testArbitraryQuery(self):
     """
     Exercise execute_arbitrary (inherited from DBObject) with a raw SQL join.
     """
     nonsense = CatalogDBObject.from_objid('Nonsense')
     sql = 'SELECT test.id, test.mag, test2.id, test2.mag FROM test, test2 WHERE test.id=test2.id'
     rows = nonsense.execute_arbitrary(sql)
     # the join on id should produce exactly 1250 matched rows
     self.assertEqual(len(rows), 1250)
     for rec in rows:
         self.assertEqual(rec[0], rec[2])
         self.assertAlmostEqual(rec[1], 0.5*rec[3], 6)
 def testArbitraryQuery(self):
     """
     Directly execute an arbitrary SQL query via the inherited DBObject API.
     """
     db = CatalogDBObject.from_objid('Nonsense')
     results = db.execute_arbitrary(
         'SELECT test.id, test.mag, test2.id, test2.mag FROM test, test2 WHERE test.id=test2.id')
     # the id join yields 1250 rows; test.mag is half of test2.mag
     self.assertEqual(1250, len(results))
     for row in results:
         self.assertEqual(row[2], row[0])
         self.assertAlmostEqual(row[1], row[3] * 0.5, 6)
    def testNonsenseArbitraryConstraints(self):
        """
        Test a query with a user-specified constraint on the magnitude column
        """

        myNonsense = CatalogDBObject.from_objid('Nonsense')

        raMin = 50.0
        raMax = 150.0
        decMax = 30.0
        decMin = -20.0
        raCenter=0.5*(raMin+raMax)
        decCenter=0.5*(decMin+decMax)

        mycolumns = ['NonsenseId','NonsenseRaJ2000','NonsenseDecJ2000','NonsenseMag']

        boxObsMd = ObservationMetaData(boundType='box',unrefractedRA=raCenter,unrefractedDec=decCenter,
                    boundLength=numpy.array([0.5*(raMax-raMin),0.5*(decMax-decMin)]), mjd=52000.,bandpassName='r')

        boxQuery = myNonsense.query_columns(colnames = mycolumns,
                      obs_metadata=boxObsMd, chunk_size=100, constraint = 'mag > 11.0')

        # convert the bounds to radians to match the query output
        raMin = numpy.radians(raMin)
        raMax = numpy.radians(raMax)
        decMin = numpy.radians(decMin)
        decMax = numpy.radians(decMax)

        goodPoints = []

        ct = 0
        for chunk in boxQuery:
            for row in chunk:
                ct += 1

                # assertLess/assertGreater report the offending values on
                # failure, unlike assertTrue(a < b)
                self.assertLess(row[1], raMax)
                self.assertGreater(row[1], raMin)
                self.assertLess(row[2], decMax)
                self.assertGreater(row[2], decMin)
                self.assertGreater(row[3], 11.0)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                #keep a list of the points returned by the query
                goodPoints.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]),row[1],3)
                self.assertAlmostEqual(numpy.radians(self.baselineData['dec'][dex]),row[2],3)
                self.assertAlmostEqual(self.baselineData['mag'][dex],row[3],3)

        # guard against the query silently returning nothing
        self.assertGreater(ct, 0)

        ct = 0
        for entry in [xx for xx in self.baselineData if xx[0] not in goodPoints]:
            #make sure that the points not returned by the query did, in fact, violate one of the
            #constraints of the query (either the box bound or the magnitude cut off)
            switch = (entry[1] > raMax or entry[1] < raMin or entry[2] >decMax or entry[2] < decMin or entry[3]<11.0)

            self.assertTrue(switch)
            ct += 1
        self.assertGreater(ct, 0)
    def testNonsenseBoxConstraints(self):
        """
        Test that a query performed on a box bound gets all of the points (and only all of the
        points) inside that box bound.
        """

        myNonsense = CatalogDBObject.from_objid('Nonsense')

        raMin = 50.0
        raMax = 150.0
        decMax = 30.0
        decMin = -20.0

        raCenter = 0.5*(raMin+raMax)
        decCenter = 0.5*(decMin+decMax)

        mycolumns = ['NonsenseId','NonsenseRaJ2000','NonsenseDecJ2000','NonsenseMag']

        boxObsMd = ObservationMetaData(boundType='box',unrefractedDec=decCenter, unrefractedRA=raCenter,
                   boundLength=numpy.array([0.5*(raMax-raMin),0.5*(decMax-decMin)]),mjd=52000.,bandpassName='r')

        boxQuery = myNonsense.query_columns(obs_metadata=boxObsMd, chunk_size=100, colnames=mycolumns)

        # convert the bounds to radians to match the query output
        raMin = numpy.radians(raMin)
        raMax = numpy.radians(raMax)
        decMin = numpy.radians(decMin)
        decMax = numpy.radians(decMax)

        goodPoints = []

        ct = 0
        for chunk in boxQuery:
            for row in chunk:
                ct += 1
                # assertLess/assertGreater report the offending values on
                # failure, unlike assertTrue(a < b)
                self.assertLess(row[1], raMax)
                self.assertGreater(row[1], raMin)
                self.assertLess(row[2], decMax)
                self.assertGreater(row[2], decMin)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                #keep a list of which points were returned by the query
                goodPoints.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]),row[1],3)
                self.assertAlmostEqual(numpy.radians(self.baselineData['dec'][dex]),row[2],3)
                self.assertAlmostEqual(self.baselineData['mag'][dex],row[3],3)

        # guard against the query silently returning nothing
        self.assertGreater(ct, 0)

        ct = 0
        for entry in [xx for xx in self.baselineData if xx[0] not in goodPoints]:
            #make sure that the points not returned by the query are, in fact, outside of the
            #box bound

            switch = (entry[1] > raMax or entry[1] < raMin or entry[2] >decMax or entry[2] < decMin)
            self.assertTrue(switch)
            ct += 1
        self.assertGreater(ct, 0)
    def testRealQueryConstraints(self):
        """
        Test that a user-supplied SQL constraint on ra actually bounds the
        returned stars.
        """
        mystars = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
        mycolumns = ['id','raJ2000','decJ2000','umag','gmag','rmag','imag','zmag','ymag']

        #recall that ra and dec are stored in degrees in the data base
        myquery = mystars.query_columns(colnames = mycolumns,
                                        constraint = 'ra < 90. and ra > 45.')

        tol=1.0e-3
        for chunk in myquery:
            for star in chunk:
                # assertLess/assertGreater show the offending value on
                # failure, unlike assertTrue(a < b)
                self.assertLess(numpy.degrees(star[1]), 90.0+tol)
                self.assertGreater(numpy.degrees(star[1]), 45.0-tol)
    def testChunking(self):
        """
        Test that a query with a specified chunk_size does, in fact, return chunks of that size
        """

        mystars = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
        mycolumns = ['id','raJ2000','decJ2000','umag','gmag']
        myquery = mystars.query_columns(colnames = mycolumns, chunk_size = 1000)

        for chunk in myquery:
            self.assertEqual(chunk.size,1000)
            for row in chunk:
                # BUG FIX: assertTrue(len(row), 5) treated 5 as the failure
                # message and always passed; assertEqual actually verifies
                # that each row has the five requested columns
                self.assertEqual(len(row), 5)
    def testChunking(self):
        """
        Test that a query with a specified chunk_size does, in fact, return chunks of that size
        """

        mystars = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
        mycolumns = ['id', 'raJ2000', 'decJ2000', 'umag', 'gmag']
        myquery = mystars.query_columns(colnames=mycolumns, chunk_size=1000)

        for chunk in myquery:
            self.assertEqual(chunk.size, 1000)
            for row in chunk:
                # assertTrue(len(row), 5) was a no-op (the second argument is
                # only the failure message); assertEqual actually verifies the
                # row has the five requested columns.
                self.assertEqual(len(row), 5)
Exemple #27
0
 def testArbitraryChunkIterator(self):
     """
     Test method to create a ChunkIterator from an arbitrary SQL query (inherited from DBObject class)
     """
     nonsense_db = CatalogDBObject.from_objid('Nonsense')
     sql = 'SELECT test.id, test.mag, test2.id, test2.mag FROM test, test2 WHERE test.id=test2.id'
     row_dtype = numpy.dtype([('id1', int), ('mag1', float), ('id2', int), ('mag2', float)])
     iterator = nonsense_db.get_chunk_iterator(sql, chunk_size=100, dtype=row_dtype)
     total_rows = 0
     for block in iterator:
         for record in block:
             # the join condition ties the two ids together
             self.assertEqual(record[0], record[2])
             self.assertAlmostEqual(record[1], 0.5*record[3], 6)
             total_rows += 1
     # the joined tables are expected to yield 1250 rows in total
     self.assertEqual(total_rows, 1250)
    def testAmcvn(self):
        #Note: this test assumes that the parameters for the Amcvn variability
        #model occur in a standard varParamStr column in the database.
        #Actually, the current database of Amcvn events simply store the variability
        #parameters as independent columns in the database.
        #The varParamStr formalism is how the applyAmcvn method is written, however,
        #so that is what we will test.

        catName = 'amcvnTestCatalog.dat'
        makeAmcvnTable()
        myDB = CatalogDBObject.from_objid('amcvnTest')
        myCatalog = myDB.getCatalog('stellarVariabilityCatalog', obs_metadata=self.obs_metadata)
        try:
            myCatalog.write_catalog(catName, chunk_size=1000)
        finally:
            # clean up even if write_catalog raises, so a failed run does not
            # leave a stale catalog file behind for other tests (the original
            # only deleted the file on success)
            if os.path.exists(catName):
                os.unlink(catName)
 def testArbitraryChunkIterator(self):
     """
     Test method to create a ChunkIterator from an arbitrary SQL query (inherited from DBObject class)
     """
     # 'Nonsense' is a test CatalogDBObject backed by the tables joined below
     myNonsense = CatalogDBObject.from_objid('Nonsense')
     query = 'SELECT test.id, test.mag, test2.id, test2.mag FROM test, test2 WHERE test.id=test2.id'
     dtype = numpy.dtype([('id1',int),('mag1',float),('id2',int),('mag2',float)])
     results = myNonsense.get_chunk_iterator(query,chunk_size=100,dtype=dtype)
     i = 0  # counts every row seen across all chunks
     for chunk in results:
         for row in chunk:
             # the join condition guarantees matching ids
             self.assertEqual(row[0],row[2])
             # test2.mag is apparently twice test.mag -- TODO confirm against
             # the fixture that populates these tables
             self.assertAlmostEqual(row[1],0.5*row[3],6)
             i += 1
     # the joined tables are expected to yield 1250 rows in total
     self.assertEqual(i,1250)
Exemple #30
0
    def testRealQueryConstraints(self):
        """
        Check that an SQL constraint string handed to query_columns is honored
        by the returned rows.
        """
        star_db = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
        columns = ['id', 'raJ2000', 'decJ2000', 'umag', 'gmag', 'rmag', 'imag', 'zmag', 'ymag']

        #recall that ra and dec are stored in degrees in the data base
        results = star_db.query_columns(colnames=columns,
                                        constraint='ra < 90. and ra > 45.')

        tolerance = 1.0e-3
        n_rows = 0
        for block in results:
            for record in block:
                n_rows += 1
                # raJ2000 is returned in radians
                ra_deg = numpy.degrees(record[1])
                self.assertLess(ra_deg, 90.0 + tolerance)
                self.assertGreater(ra_deg, 45.0 - tolerance)
        # the query must have returned at least one row
        self.assertGreater(n_rows, 0)
    def testNonsenseCircularConstraints(self):
        """
        Test that a query performed on a circle bound gets all of the objects (and only all
        of the objects) within that circle
        """

        myNonsense = CatalogDBObject.from_objid('Nonsense')

        radius = 20.0
        raCenter = 210.0
        decCenter = -60.0

        mycolumns = ['NonsenseId', 'NonsenseRaJ2000', 'NonsenseDecJ2000', 'NonsenseMag']

        circObsMd = ObservationMetaData(boundType='circle', unrefractedRA=raCenter, unrefractedDec=decCenter,
                                        boundLength=radius, mjd=52000., bandpassName='r')

        circQuery = myNonsense.query_columns(colnames=mycolumns, obs_metadata=circObsMd, chunk_size=100)

        # convert to radians, since the query returns coordinates in radians
        raCenter = numpy.radians(raCenter)
        decCenter = numpy.radians(decCenter)
        radius = numpy.radians(radius)

        goodPoints = []

        for chunk in circQuery:
            for row in chunk:
                distance = haversine(raCenter, decCenter, row[1], row[2])

                # assertLess gives an informative failure message, unlike
                # assertTrue(distance < radius); consistent with the other
                # tests in this file
                self.assertLess(distance, radius)

                dex = numpy.where(self.baselineData['id'] == row[0])[0][0]

                #store a list of which objects fell within our circle bound
                goodPoints.append(row[0])

                self.assertAlmostEqual(numpy.radians(self.baselineData['ra'][dex]), row[1], 3)
                self.assertAlmostEqual(numpy.radians(self.baselineData['dec'][dex]), row[2], 3)
                self.assertAlmostEqual(self.baselineData['mag'][dex], row[3], 3)

        for entry in [xx for xx in self.baselineData if xx[0] not in goodPoints]:
            #make sure that all of the points not returned by the query were, in fact, outside of
            #the circle bound
            distance = haversine(raCenter, decCenter, numpy.radians(entry[1]), numpy.radians(entry[2]))
            self.assertGreater(distance, radius)
Exemple #32
0
    def testArbitraryQuery_passConnection(self):
        """
        Test method to directly execute an arbitrary SQL query (inherited from DBObject class)

        Pass connection directly in to the constructor.
        """
        base_db = CatalogDBObject.from_objid('Nonsense')
        nonsense_db = myNonsenseDB_noConnection(connection=base_db.connection)
        sql = 'SELECT test.id, test.mag, test2.id, test2.mag FROM test, test2 WHERE test.id=test2.id'
        rows = nonsense_db.execute_arbitrary(sql)
        self.assertEqual(len(rows), 1250)
        n_checked = 0
        for record in rows:
            n_checked += 1
            # the join condition ties the two ids together
            self.assertEqual(record[0], record[2])
            self.assertAlmostEqual(record[1], 0.5*record[3], 6)
        self.assertGreater(n_checked, 0)
def catsim_query_stack10(objid, constraint, catalog, radius, opsim_metadata):
    """ Query catsim and make a catalog """
    # import moved below the docstring: in the original the import preceded
    # the string literal, so the "docstring" was a no-op statement and the
    # function had no __doc__ (PEP 257: the docstring must be the first
    # statement in the body)
    from lsst.sims.catalogs.generation.db import CatalogDBObject

    # opsim_metadata[1]/[2] are taken as RA/Dec in radians and [5] as the MJD
    # of the pointing -- TODO confirm against the OpSim row layout
    obs_metadata = ObservationMetaData(
        boundType='circle',
        unrefractedRA=opsim_metadata[1] * 180 / pi,
        unrefractedDec=opsim_metadata[2] * 180 / pi,
        boundLength=radius,
        mjd=opsim_metadata[5])
    dbobj = CatalogDBObject.from_objid(objid)

    t = dbobj.getCatalog(catalog,
                         obs_metadata=obs_metadata,
                         constraint=constraint)
    #    filename = 'test_reference.dat'
    #    t.write_catalog(filename, chunk_size=10)
    return t, obs_metadata
def exampleReferenceCatalog():
    """
    This method outputs a reference catalog of galaxies (i.e. a catalog of
    galaxies in which the columns are simply the data stored in the database).

    The catalog class is defined in
    python/lsst/sims/catUtils/exampleCatalogDefinitions/refCatalogExamples.py

    The catalog is output to the file test_reference.dat
    """
    # a tiny (0.01 deg) circular field centered on (RA, Dec) = (0, 0)
    pointing = ObservationMetaData(boundType='circle', unrefractedRA=0.0, unrefractedDec=0.0,
                                   boundLength=0.01)
    galaxy_db = CatalogDBObject.from_objid('galaxyBase')
    catalog = galaxy_db.getCatalog('ref_catalog_galaxy', obs_metadata=pointing)
    catalog.write_catalog('test_reference.dat', chunk_size=10)
 def testObsCat(self):
     """
     Write an 'obs_star_cat' catalog for the white-dwarf star table and make
     sure the output file is removed afterwards.
     """
     objname = 'wdstars'
     dbobj = CatalogDBObject.from_objid(objname)
     obs_metadata = dbobj.testObservationMetaData
     # To cover the central ~raft
     obs_metadata.boundLength = 0.4
     # minimal phoSim header metadata; pointing angles are stored in radians
     opsMetadata = {'Opsim_rotskypos':(0., float),
                    'Unrefracted_RA':(numpy.radians(obs_metadata.unrefractedRA), float),
                    'Unrefracted_Dec':(numpy.radians(obs_metadata.unrefractedDec), float)}
     obs_metadata.phoSimMetaData = opsMetadata
     cat = dbobj.getCatalog('obs_star_cat', obs_metadata)
     # remove any stale output from a previous run before writing
     if os.path.exists('testCat.out'):
         os.unlink('testCat.out')
     try:
         cat.write_catalog('testCat.out')
     finally:
         # clean up even if write_catalog raises
         if os.path.exists('testCat.out'):
             os.unlink('testCat.out')
    def testObsCat(self):
        """
        Write an 'obs_star_cat' catalog for the white-dwarf star table, read it
        back with numpy, and verify it is non-empty.  Failures caused by the
        'fatboy' database being unreachable are deliberately swallowed.
        """
        objname = 'wdstars'
        catName = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'scratchSpace',
                               'testObsCat.txt')

        try:
            dbobj = CatalogDBObject.from_objid(objname)
            obs_metadata = dbobj.testObservationMetaData
            # To cover the central ~raft
            obs_metadata.boundLength = 0.4
            # minimal phoSim header metadata; pointing angles are in radians
            opsMetadata = {'Opsim_rotskypos':(0., float),
                           'pointingRA':(numpy.radians(obs_metadata.pointingRA), float),
                           'pointingDec':(numpy.radians(obs_metadata.pointingDec), float)}
            obs_metadata.phoSimMetaData = opsMetadata
            cat = dbobj.getCatalog('obs_star_cat', obs_metadata)
            # remove stale output from a previous run before writing
            if os.path.exists(catName):
                os.unlink(catName)
            try:
                cat.write_catalog(catName)
                # read the catalog back as floats and check it has rows
                dtypeList = [(name, numpy.float) for name in cat._column_outputs]
                testData = numpy.genfromtxt(catName, delimiter = ', ',
                                            dtype=numpy.dtype(dtypeList))
                self.assertGreater(len(testData), 0)
            finally:
                # clean up even if write_catalog or genfromtxt raises
                if os.path.exists(catName):
                    os.unlink(catName)

            print '\ntestObsCat successfully connected to fatboy'

        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
        # the else branch re-raises, so genuine failures still surface
        except:
            trace = traceback.extract_tb(sys.exc_info()[2], limit=20)
            msg = sys.exc_info()[1].args[0]
            if 'Failed to connect' in msg or failedOnFatboy(trace):

                # if the exception was because of a failed connection
                # to fatboy, ignore it.

                print '\ntestObsCat failed to connect to fatboy'
                print 'Sometimes that happens.  Do not worry.'

                pass
            else:
                raise
    def setUpClass(cls):
        """
        Write SNIa catalogs for two epochs from the 'galaxyTiled' database.

        NOTE(review): takes ``cls`` -- confirm it is decorated with
        @classmethod; the decorator is not visible in this excerpt.
        """
        # delete previous test db if present
        if os.path.exists('testData/sncat.db'):
            print 'deleting previous database'
            os.unlink('testData/sncat.db')

        # two epochs separated by 3 days
        # NOTE(review): 570123.15 looks too large for an MJD -- possibly a
        # typo; confirm the intended value
        mjds = [570123.15 + 3.*i for i in range(2)]
        galDB = CatalogDBObject.from_objid('galaxyTiled')

        for i, myMJD in enumerate(mjds):
            # small (0.015 deg) circular field at (RA, Dec) = (5, 15), all six bands
            myObsMD = ObservationMetaData(boundType='circle',
                                          boundLength=0.015,
                                          unrefractedRA=5.0,
                                          unrefractedDec=15.0,
                                          bandpassName=
                                          ['u', 'g', 'r', 'i', 'z', 'y'],
                                          mjd=myMJD)
            catalog = SNIaCatalog(db_obj=galDB, obs_metadata=myObsMD)
            fname = "testData/SNIaCat_" + str(i) + ".txt"
            print fname, myObsMD.mjd
            catalog.write_catalog(fname)
def examplePhoSimNoOpSim():
    """
    This method outputs phoSim input files based on arbitrary input coordinates
    (rather than an OpSim pointing).

    catalog_test_stars_rd.dat is a file created from a specified RA, Dec pointing

    catalog_test_stars_aa.dat is a file created from a specified Alt, Az pointing
    """
    ra_deg = 15.
    dec_deg = -30.
    mjd = 51999.75

    # makeObsParamsRaDecTel (see python.lsst.sims.catUtils.observationMetaDataUtils)
    # returns the dict of data phoSim needs to specify a pointing
    # (ra and dec of the moon, rotation of the sky relative to the telescope, etc.)
    params_rd = makeObsParamsRaDecTel(math.radians(ra_deg), math.radians(dec_deg), mjd, 'r')
    obs_metadata_rd = ObservationMetaData(boundType='circle',
                                          boundLength=0.1,
                                          mjd=mjd,
                                          phoSimMetaData=params_rd)

    # the same pointing dict, but derived from an Alt/Az specification
    az_rad = math.radians(220.)
    alt_rad = math.radians(79.)
    params_aa = makeObsParamsAzAltTel(az_rad, alt_rad, mjd, 'r')
    ra_deg = math.degrees(params_aa['Unrefracted_RA'][0])
    dec_deg = math.degrees(params_aa['Unrefracted_Dec'][0])
    obs_metadata_aa = ObservationMetaData(boundType='circle',
                                          boundLength=0.1,
                                          mjd=mjd,
                                          phoSimMetaData=params_aa)

    star_db = CatalogDBObject.from_objid('msstars')
    catalog = star_db.getCatalog('phoSim_catalog_POINT', obs_metadata=obs_metadata_rd)
    catalog.write_catalog('catalog_test_stars_rd.dat')
    catalog = star_db.getCatalog('phoSim_catalog_POINT', obs_metadata=obs_metadata_aa)
    catalog.write_catalog('catalog_test_stars_aa.dat')
    # NOTE(review): this fragment depends on names (nside, args, filterName,
    # starNames, np, hp, glob) defined in enclosing code that is not visible here.
    # Set the min to 15 since we saturate there. CatSim max is 28
    bins = np.arange(15., 28.2, .2)
    starDensity = np.zeros((hp.nside2npix(nside), np.size(bins) - 1),
                           dtype=float)
    overMaxMask = np.zeros(hp.nside2npix(nside), dtype=bool)
    # hp.pix2ang returns colatitude; convert to declination
    lat, ra = hp.pix2ang(nside, np.arange(0, hp.nside2npix(nside)))
    dec = np.pi / 2. - lat

    # Square root of pixel area.
    hpsizeDeg = hp.nside2resol(nside, arcmin=True) / 60.

    # Limit things to a 10 arcmin radius
    hpsizeDeg = np.min([10. / 60., hpsizeDeg])

    # Options include galaxyBase, cepheidstars, wdstars, rrlystars, msstars, bhbstars, allstars, and more...
    dbobj = CatalogDBObject.from_objid(args.stars)

    indxMin = 0

    # resume from a previous run's checkpoint file if one exists
    restoreFile = glob.glob('starDensity_%s_%s_nside_%i.npz' %
                            (filterName, starNames, nside))
    if len(restoreFile) > 0:
        data = np.load(restoreFile[0])
        starDensity = data['starDensity'].copy()
        indxMin = data['icheck'].copy()
        overMaxMask = data['overMaxMask'].copy()

    print ''
    # Look at a circular area the same area as the healpix it's centered on.
    boundLength = hpsizeDeg / np.pi**0.5
Exemple #40
0
from lsst.sims.catUtils.exampleCatalogDefinitions.phoSimCatalogExamples import \
        PhoSimCatalogPoint, PhoSimCatalogSersic2D, PhoSimCatalogZPoint
from sprinkler import sprinklerAGN

from lsst.sims.catUtils.baseCatalogModels import *

# the star tables to draw phoSim point sources from
starObjNames = ['msstars', 'bhbstars', 'wdstars', 'rrlystars', 'cepheidstars']

obsMD = OpSim3_61DBObject()
# pointing for OpSim observation 88625744 with a 0.05 deg circular bound
obs_metadata = obsMD.getObservationMetaData(88625744,
                                            0.05,
                                            makeCircBounds=True)

# write the phoSim header only once, before the first class of objects
doHeader = True
for starName in starObjNames:
    stars = CatalogDBObject.from_objid(starName)
    star_phoSim = PhoSimCatalogPoint(
        stars, obs_metadata=obs_metadata)  #the class for phoSim input files
    #containing point sources
    if (doHeader):
        with open("phoSim_example.txt", "w") as fh:
            star_phoSim.write_header(fh)
        doHeader = False

    #below, write_header=False prevents the code from overwriting the header just written
    #write_mode = 'a' allows the code to append the new objects to the output file, rather
    #than overwriting the file for each different class of object.
    star_phoSim.write_catalog("phoSim_example.txt",
                              write_mode='a',
                              write_header=False,
                              chunk_size=20000)
Exemple #41
0
    def testClassVariables(self):
        """
        Make sure that the daughter classes of CatalogDBObject properly overwrite the member
        variables of CatalogDBObject
        """

        mystars = CatalogDBObject.from_objid('testCatalogDBObjectTeststars')
        myNonsense = CatalogDBObject.from_objid('Nonsense')
        mygalaxies = CatalogDBObject.from_objid('testCatalogDBObjectTestgals')

        # data-driven checks: (attribute name, expected value) per DBObject
        for attr, expected in [('raColName', 'ra'),
                               ('decColName', 'decl'),
                               ('idColKey', 'id'),
                               ('driver', 'sqlite'),
                               ('database', 'testCatalogDBObjectDatabase.db'),
                               ('appendint', 1023),
                               ('tableid', 'stars'),
                               ('objid', 'testCatalogDBObjectTeststars')]:
            self.assertEqual(getattr(mystars, attr), expected)
        # stars define no spatial model
        self.assertFalse(hasattr(mystars, 'spatialModel'))

        for attr, expected in [('raColName', 'ra'),
                               ('decColName', 'decl'),
                               ('idColKey', 'id'),
                               ('driver', 'sqlite'),
                               ('database', 'testCatalogDBObjectDatabase.db'),
                               ('appendint', 1022),
                               ('tableid', 'galaxies'),
                               ('spatialModel', 'SERSIC2D'),
                               ('objid', 'testCatalogDBObjectTestgals')]:
            self.assertEqual(getattr(mygalaxies, attr), expected)
        self.assertTrue(hasattr(mygalaxies, 'spatialModel'))

        for attr, expected in [('raColName', 'ra'),
                               ('decColName', 'dec'),
                               ('idColKey', 'NonsenseId'),
                               ('driver', 'sqlite'),
                               ('database', 'testCatalogDBObjectNonsenseDB.db'),
                               ('tableid', 'test'),
                               ('objid', 'Nonsense')]:
            self.assertEqual(getattr(myNonsense, attr), expected)
        # the Nonsense object defines neither appendint nor spatialModel
        self.assertFalse(hasattr(myNonsense, 'appendint'))
        self.assertFalse(hasattr(myNonsense, 'spatialModel'))

        # every objid (including legacy aliases) must appear in the registry
        for key in ('teststars', 'testgals',
                    'testCatalogDBObjectTeststars', 'testCatalogDBObjectTestgals',
                    'Nonsense'):
            self.assertIn(key, CatalogDBObject.registry)

        def check_columns(dbobj, expected_columns):
            # compare the column specifications tuple-for-tuple, in order
            for actual, wanted in zip(dbobj.columns, expected_columns):
                self.assertEqual(actual, wanted)

        check_columns(mystars,
                      [('id', None, int), ('raJ2000', 'ra*%f' % (numpy.pi/180.)),
                       ('decJ2000', 'decl*%f' % (numpy.pi/180.)),
                       ('parallax', 'parallax*%.15f' % (numpy.pi/(648000000.0))),
                       ('properMotionRa', 'properMotionRa*%.15f' % (numpy.pi/180.)),
                       ('properMotionDec', 'properMotionDec*%.15f' % (numpy.pi/180.)),
                       ('umag', None), ('gmag', None), ('rmag', None), ('imag', None),
                       ('zmag', None), ('ymag', None),
                       ('magNorm', 'mag_norm', float)])

        check_columns(myNonsense,
                      [('NonsenseId', 'id', int),
                       ('NonsenseRaJ2000', 'ra*%f' % (numpy.pi/180.)),
                       ('NonsenseDecJ2000', 'dec*%f' % (numpy.pi/180.)),
                       ('NonsenseMag', 'mag', float)])

        check_columns(mygalaxies,
                      [('id', None, int),
                       ('raJ2000', 'ra*%f' % (numpy.pi/180.)),
                       ('decJ2000', 'decl*%f' % (numpy.pi/180.)),
                       ('umag', None),
                       ('gmag', None),
                       ('rmag', None),
                       ('imag', None),
                       ('zmag', None),
                       ('ymag', None),
                       ('magNormAgn', 'mag_norm_agn', None),
                       ('magNormDisk', 'mag_norm_disk', None),
                       ('magNormBulge', 'mag_norm_bulge', None),
                       ('redshift', None),
                       ('a_disk', None),
                       ('b_disk', None),
                       ('a_bulge', None),
                       ('b_bulge', None)])
    def testCannotBeNull(self):
        """
        Test to make sure that the code for filtering out rows with null values
        in key columns works.
        """

        #each of these classes flags a different column with a different datatype as cannot_be_null
        availableCatalogs = [
            floatCannotBeNullCatalog, strCannotBeNullCatalog,
            unicodeCannotBeNullCatalog
        ]
        dbobj = CatalogDBObject.from_objid('cannotBeNull')

        for catClass in availableCatalogs:
            cat = catClass(dbobj)
            fileName = 'cannotBeNullTestFile.txt'
            cat.write_catalog(fileName)
            # read the written catalog back: three float columns plus two
            # fixed-width string columns
            dtype = numpy.dtype([('id', int), ('n1', numpy.float64),
                                 ('n2', numpy.float64), ('n3', numpy.float64),
                                 ('n4', (str, 40)), ('n5', (unicode, 40))])
            testData = numpy.genfromtxt(fileName, dtype=dtype, delimiter=',')

            j = 0  #a counter to keep track of the rows read in from the catalog

            for i in range(len(self.baselineOutput)):

                #self.baselineOutput contains all of the rows from the dbobj
                #first, we must assess whether or not the row we are currently
                #testing would, in fact, pass the cannot_be_null test
                validLine = True
                # string columns signal null with the literal text 'None';
                # numeric columns signal null with NaN
                if isinstance(self.baselineOutput[cat.cannot_be_null[0]][i],str) or \
                   isinstance(self.baselineOutput[cat.cannot_be_null[0]][i],unicode):

                    if self.baselineOutput[cat.cannot_be_null[0]][i].strip(
                    ).lower() == 'none':
                        validLine = False
                else:
                    if numpy.isnan(
                            self.baselineOutput[cat.cannot_be_null[0]][i]):
                        validLine = False

                if validLine:
                    #if the row in self.baslineOutput should be in the catalog, we now check
                    #that baseline and testData agree on column values (there are some gymnastics
                    #here because you cannot do an == on NaN's
                    for (k, xx) in enumerate(self.baselineOutput[i]):
                        # columns 0-3 are numeric; 4+ are strings
                        if k < 4:
                            if not numpy.isnan(xx):
                                msg = 'k: %d -- %s %s -- %s' % (
                                    k, str(xx), str(
                                        testData[j][k]), cat.cannot_be_null)
                                self.assertAlmostEqual(xx,
                                                       testData[j][k],
                                                       3,
                                                       msg=msg)
                            else:
                                self.assertTrue(numpy.isnan(testData[j][k]))
                        else:
                            msg = '%s (%s) is not %s (%s)' % (xx, type(
                                xx), testData[j][k], type(testData[j][k]))
                            self.assertEqual(xx.strip(),
                                             testData[j][k].strip(),
                                             msg=msg)
                    j += 1

            self.assertEqual(
                i, 99)  #make sure that we tested all of the baseline rows
            self.assertEqual(j, len(
                testData))  #make sure that we tested all of the testData rows
            msg = '%d >= %d' % (j, i)
            self.assertTrue(
                j < i, msg=msg
            )  #make sure that some rows did not make it into the catalog

        if os.path.exists(fileName):
            os.unlink(fileName)
        Note that because all angles are handled inside of the stack as radians,
        the returned angular distance will also be in radians
        """
        r0 = self.column_by_name('raJ2000')
        d0 = self.column_by_name('decJ2000')
        r1 = self.column_by_name('raObserved')
        d1 = self.column_by_name('decObserved')

        return haversine(r0, d0, r1, d1)




#write the catalog directly
myDB = CatalogDBObject.from_objid('allstars')
# a 0.1 deg circular field at (RA, Dec) = (220, 19), MJD 52000
obs_metadata = ObservationMetaData(unrefractedRA=220.0, unrefractedDec=19.0,
                                   boundType='circle', boundLength=0.1,
                                   mjd=52000.0)

cat = TutorialCatalog(myDB, obs_metadata=obs_metadata)
cat.write_catalog('tutorial_astrometry_photometry.txt')



#write the catalog using CatalogDBObject.getCatalog()
# a second pointing at (RA, Dec) = (120, -5); 'tutorial_catalog' is the
# registered name of the catalog class
obs_metadata = ObservationMetaData(unrefractedRA=120.0, unrefractedDec=-5.0,
                                   boundType='circle', boundLength=0.1,
                                   mjd=52000.0)

cat = myDB.getCatalog('tutorial_catalog', obs_metadata=obs_metadata)
    # NOTE(review): fragment of a larger main() -- 'parser' is created in code
    # that is not visible here.
    parser.add_argument('opsimDB', help='OpSim database sqlite file')
    parser.add_argument('-o',
                        '--outfile',
                        type=str,
                        default='twinkles_ref.txt',
                        help='Filename of output reference catalog')
    args = parser.parse_args()

    # you need to provide ObservationMetaDataGenerator with the connection
    # string to an OpSim output database.  This is the connection string
    # to a test database that comes when you install CatSim.
    generator = ObservationMetaDataGenerator(database=args.opsimDB,
                                             driver='sqlite')
    obsMetaDataResults = generator.getObservationMetaData(fieldRA=(53, 54),
                                                          fieldDec=(-29, -27),
                                                          boundLength=0.3)

    # First get the reference catalog
    stars = CatalogDBObject.from_objid('allstars')
    # retry until the catalog can be instantiated
    # NOTE(review): retrying forever on RuntimeError can loop indefinitely if
    # the failure is not transient -- consider a bounded retry count
    while True:
        try:
            ref_stars = TwinklesReference(stars,
                                          obs_metadata=obsMetaDataResults[0])
            break
        except RuntimeError:
            continue
    ref_stars.write_catalog(args.outfile,
                            write_mode='w',
                            write_header=True,
                            chunk_size=20000)
 def query_columns(self, *args, **kwargs):
     """
     Forward all arguments to CatalogDBObject.query_columns, calling the base
     class implementation explicitly.
     """
     return CatalogDBObject.query_columns(self, *args, **kwargs)
#!/usr/bin/env python

# To benchmark an instance Catalog, first look at
# the instance of CatalogDBObject
import numpy as np

from benchmarkInstanceCatalogs import QueryBenchMarks
from lsst.sims.catalogs.generation.db import CatalogDBObject
import lsst.sims.catUtils.baseCatalogModels as bcm
from lsst.sims.catalogs.measures.instance import InstanceCatalog

galDB = CatalogDBObject.from_objid('galaxyTiled')

# Create a child of the InstanceCatalog Class
class galCopy(InstanceCatalog):
    # columns copied straight out of the database
    column_outputs = ['id', 'raJ2000', 'decJ2000', 'redshift']
    # write the coordinates in exponential format
    override_formats = {'raJ2000': '%8e', 'decJ2000': '%8e'}

# Sizes to be used for querying
boundLens = np.arange(0.05, 1.8, 0.05)

# Instantiate a benchmark object using pointings drawn from an OpSim HDF store
opsimDBHDF ='/astro/users/rbiswas/data/LSST/OpSimData/storage.h5'
gcb = QueryBenchMarks.fromOpSimDF(instanceCatChild=galCopy, dbObject=galDB,
                                  opSimHDF=opsimDBHDF, boundLens=boundLens,
                                  constraints='r_ab < 24.0',
                                  numSamps=3, name='magneto_test_rless24')
# Look at the sizes of the sampled coordinates and bounds
print gcb.coords.size
print gcb.boundLens.size
from lsst.sims.catalogs.generation.db import ObservationMetaData, CatalogDBObject
from lsst.sims.catUtils.baseCatalogModels import OpSim3_61DBObject
from lsst.sims.catUtils.exampleCatalogDefinitions.phoSimCatalogExamples import \
        PhoSimCatalogPoint, PhoSimCatalogSersic2D, PhoSimCatalogZPoint
from sprinkler import sprinklerAGN

from lsst.sims.catUtils.baseCatalogModels import *

# the star tables to draw phoSim point sources from
starObjNames = ['msstars', 'bhbstars', 'wdstars', 'rrlystars', 'cepheidstars']

obsMD = OpSim3_61DBObject()
# pointing for OpSim observation 88625744 with a 0.05 deg circular bound
obs_metadata = obsMD.getObservationMetaData(88625744, 0.05, makeCircBounds = True)

# write the phoSim header only once, before the first class of objects
doHeader= True
for starName in starObjNames:
    stars = CatalogDBObject.from_objid(starName)
    star_phoSim=PhoSimCatalogPoint(stars,obs_metadata=obs_metadata) #the class for phoSim input files
                                                                #containing point sources
    if (doHeader):
        with open("phoSim_example.txt","w") as fh:
            star_phoSim.write_header(fh)
        doHeader = False

    #below, write_header=False prevents the code from overwriting the header just written
    #write_mode = 'a' allows the code to append the new objects to the output file, rather
    #than overwriting the file for each different class of object.
    star_phoSim.write_catalog("phoSim_example.txt",write_mode='a',write_header=False,chunk_size=20000)

gals = CatalogDBObject.from_objid('galaxyBulge')

#now append a bunch of objects with 2D sersic profiles to our output file
Exemple #48
0
        return self.parameters[idx]




    



if __name__ == "__main__":


    print sncosmo.__file__
    print SNIa.__class__

    # pull low-redshift galaxies to host the supernovae
    galDB =  CatalogDBObject.from_objid ('galaxyBase')
    catalogIterator = galDB.query_columns(colnames=['id','redshift', 'ra', 'dec', 
                                                   'mass_stellar'],
                                          constraint='redshift between 0.01 and 0.1')

    dtype = None
    for chunk in catalogIterator:
        if dtype is None:
            dtype = chunk.dtype
        for record in chunk:
            id = np.int(record['id'])
            # mass_stellar appears to be stored in units of 1e10 solar
            # masses -- TODO confirm against the galaxyBase schema
            mass = np.float(record['mass_stellar'])*10.0**10.0
            ra = np.float(record['ra'])
            dec = np.float(record['dec'])
            redshift = np.float(record['redshift'])
            # one SNIa per host galaxy, sharing the galaxy position/redshift
            sn = SNIa( galid=id, snid =id, ra=ra, dec=dec , z=redshift) 
    #If you want to use the LSST camera, uncomment the line below.
    #You can similarly assign any camera object you want here
    #camera = LsstSimMapper().camera


#select an OpSim pointing
obsMD = OpSim3_61DBObject()
obs_metadata = obsMD.getObservationMetaData(88625744, 0.05, makeCircBounds = True)

catName = './outputs_cats/galSim_galaxies_example.txt'


# restrict both galaxy components to the same redshift slice (zdn, zup]
cstrn = "redshift>"+str(zdn)+" and redshift<="+str(zup)

bulges = CatalogDBObject.from_objid('galaxyBulge')
bulge_galSim = testGalSimGalaxies(bulges, obs_metadata=obs_metadata, column_outputs=['redshift'],constraint=cstrn)
# noise/background and PSF are disabled for both catalogs -- presumably to
# skip image-degradation steps; confirm against testGalSimGalaxies
bulge_galSim.noise_and_background = None
bulge_galSim.PSF = None
bulge_galSim.write_catalog(catName, write_header=True, write_mode='a')

print 'done with bulges'

disks = CatalogDBObject.from_objid('galaxyDisk')
disk_galSim = testGalSimGalaxies(disks, obs_metadata=obs_metadata, column_outputs=['redshift'],constraint=cstrn)
# share the bulge catalog's GalSim interpreter -- presumably so both
# components are drawn into the same images; confirm
disk_galSim.copyGalSimInterpreter(bulge_galSim)
disk_galSim.noise_and_background = None
disk_galSim.PSF = None
disk_galSim.write_catalog(catName, write_header=True, write_mode='a')

print 'done with disks'