def testColumnNames(self):
    """
    Test the method that returns the names of columns in a table
    """
    dbobj = DBObject(driver=self.driver, database=self.database)

    # Column names for a single table, queried by table name.
    double_cols = dbobj.get_column_names('doubleTable')
    self.assertEqual(len(double_cols), 3)
    for expected in ('id', 'sqrt', 'log'):
        self.assertTrue(expected in double_cols)

    int_cols = dbobj.get_column_names('intTable')
    self.assertEqual(len(int_cols), 3)
    for expected in ('id', 'twice', 'thrice'):
        self.assertTrue(expected in int_cols)

    # With no table specified, a mapping of {table name: column names}
    # covering every table in the database is returned.
    all_cols = dbobj.get_column_names()
    known_tables = ['doubleTable', 'intTable', 'junkTable']
    for table in all_cols:
        self.assertTrue(table in known_tables)

    self.assertEqual(len(all_cols['doubleTable']), 3)
    self.assertEqual(len(all_cols['intTable']), 3)
    for expected in ('id', 'sqrt', 'log'):
        self.assertTrue(expected in all_cols['doubleTable'])
    for expected in ('id', 'twice', 'thrice'):
        self.assertTrue(expected in all_cols['intTable'])
def testJoin(self):
    """
    Test a join
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    query = ('SELECT doubleTable.id, intTable.id, doubleTable.log, intTable.thrice '
             'FROM doubleTable, intTable WHERE doubleTable.id = intTable.id')
    dtype = [('id', int), ('id_1', int), ('log', float), ('thrice', int)]

    # Walk the join in chunks of 10; every chunk before the 90th row
    # must be full.  Matching ids are the even numbers 2, 4, 6, ...
    rows_seen = 0
    for chunk in dbobj.get_chunk_iterator(query, chunk_size=10):
        if rows_seen < 90:
            self.assertEqual(len(chunk), 10)
        for row in chunk:
            self.assertEqual(2 * (rows_seen + 1), row[0])
            self.assertEqual(row[0], row[1])
            self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
            self.assertEqual(3 * row[0], row[3])
            self.assertEqual(dtype, row.dtype)
            rows_seen += 1
    # make sure that we found all the matches we should have
    self.assertEqual(rows_seen, 99)

    # The same query run in one shot must agree with the chunked walk.
    results = dbobj.execute_arbitrary(query)
    self.assertEqual(dtype, results.dtype)
    rows_seen = 0
    for row in results:
        self.assertEqual(2 * (rows_seen + 1), row[0])
        self.assertEqual(row[0], row[1])
        self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
        self.assertEqual(3 * row[0], row[3])
        rows_seen += 1
    self.assertEqual(rows_seen, 99)
class dbInterface(object):
    """
    Thin convenience wrapper around a DBObject connection that runs
    canned queries against the ForcedSource, CcdVisit, and Object tables.
    """

    def __init__(self, database, host, port, driver):
        """
        @param [in] database, host, port, driver are forwarded to DBObject
        to open the connection
        """
        self._dbo = DBObject(database=database, host=host, port=port,
                             driver=driver)

    def forcedSourceFromId(self, objectId):
        """
        Return the ForcedSource rows for the given objectId as a recarray.
        """
        # FIX: np.int/np.float were deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin int/float produce identical dtypes here.
        dtype = np.dtype([('object_id', int), ('ccd_visit_id', int),
                          ('psf_flux', float), ('psf_flux_err', float),
                          ('flags', int)])
        # %i forces integer formatting, so objectId cannot inject SQL text.
        query = """select objectId, ccdVisitId, psFlux, psFlux_Sigma, flags from ForcedSource where objectId = %i""" % objectId
        results = self._dbo.execute_arbitrary(query, dtype=dtype)
        return results

    def visitFromCcdVisitId(self, visitId):
        """
        Return the CcdVisit row (visit id, filter, start time) for a ccdVisitId.
        """
        dtype = np.dtype([('visit_id', int), ('filter', str, 300),
                          ('obs_start', str, 300)])
        query = """select visitId, filterName, obsStart from CcdVisit where ccdVisitId = %i""" % visitId
        results = self._dbo.execute_arbitrary(query, dtype=dtype)
        return results

    def objectFromId(self, objectId):
        """
        Return the Object row (id, parent id, ra, dec) for an objectId.
        """
        dtype = np.dtype([('object_id', int), ('parent_object_id', int),
                          ('ra', float), ('dec', float)])
        query = """select objectId, parentObjectId, psRa, psDecl from Object where objectId = %i""" % objectId
        results = self._dbo.execute_arbitrary(query, dtype=dtype)
        return results

    def objectFromRaDec(self, ra, dec, tol):
        """
        Return all Object rows inside a square box of half-width tol
        (degrees, presumably -- TODO confirm units against the schema)
        centered on (ra, dec).
        """
        dtype = np.dtype([('object_id', int), ('parent_object_id', int),
                          ('ra', float), ('dec', float)])
        query = """select objectId, parentObjectId, psRa, psDecl from Object where (psRa > %f) and (psRa < %f) and (psDecl > %f) and (psDecl < %f)""" % (ra - tol, ra + tol, dec - tol, dec + tol)
        results = self._dbo.execute_arbitrary(query, dtype=dtype)
        return results
def testTableNames(self):
    """
    Test the method that returns the names of tables in a database
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    names = dbobj.get_table_names()
    self.assertEqual(len(names), 3)
    self.assertTrue('doubleTable' in names)
    self.assertTrue('intTable' in names)
    # FIX: len(names) == 3 was asserted but only two tables were checked
    # by name; also assert the third table (junkTable, used elsewhere in
    # this suite by testReadOnlyFilter).
    self.assertTrue('junkTable' in names)
def testMinMax(self):
    """
    Test queries on SQL functions by using the MIN and MAX functions
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    results = dbobj.execute_arbitrary('SELECT MAX(thrice), MIN(thrice) FROM intTable')

    # The table holds 'thrice' values 0..594; check both extremes and the
    # auto-generated column names in the returned dtype.
    self.assertEqual(results[0][0], 594)
    self.assertEqual(results[0][1], 0)
    self.assertEqual(results.dtype, [('MAXthrice', int), ('MINthrice', int)])
def testReadOnlyFilter(self):
    """
    Test that the filters we placed on queries made with execute_arbitrary() work
    """
    # (docstring typo "aribtrary" fixed)
    dbobj = DBObject(driver=self.driver, database=self.database)
    controlQuery = 'SELECT doubleTable.id, intTable.id, doubleTable.log, intTable.thrice '
    controlQuery += 'FROM doubleTable, intTable WHERE doubleTable.id = intTable.id'
    # A benign SELECT must still be allowed through the filter.
    controlResults = dbobj.execute_arbitrary(controlQuery)

    # make sure that execute_arbitrary only accepts strings
    query = ['a', 'list']
    self.assertRaises(RuntimeError, dbobj.execute_arbitrary, query)

    # check that our filter catches different capitalization permutations of
    # the verboten commands (upper-case, Capitalized, and aLtErNaTiNg case);
    # previously each case was a separate copy-pasted stanza.
    forbidden = ['DROP TABLE junkTable',
                 'DELETE FROM junkTable WHERE id=4',
                 'UPDATE junkTable SET sqrt=0.0, log=0.0 WHERE id=4',
                 'INSERT INTO junkTable VALUES (9999, 1.0, 1.0)',
                 'Drop Table junkTable',
                 'Delete FROM junkTable WHERE id=4',
                 'Update junkTable SET sqrt=0.0, log=0.0 WHERE id=4',
                 'Insert INTO junkTable VALUES (9999, 1.0, 1.0)',
                 'dRoP TaBlE junkTable',
                 'dElEtE FROM junkTable WHERE id=4',
                 'uPdAtE junkTable SET sqrt=0.0, log=0.0 WHERE id=4',
                 'iNsErT INTO junkTable VALUES (9999, 1.0, 1.0)']
    for query in forbidden:
        self.assertRaises(RuntimeError, dbobj.execute_arbitrary, query)

    # all-lower-case versions of the four canonical commands
    for query in forbidden[:4]:
        self.assertRaises(RuntimeError, dbobj.execute_arbitrary, query.lower())
def __init__(self):
    """
    Open a connection to the 'sysarch' EA database and pre-build the
    SELECT clause used to read rows from the t_object table.
    """
    self._dbo = DBObject(database='sysarch', host='terminal.lsst.org',
                         port='3306', driver='mysql')

    # (name, type[, width]) specs for the t_object columns we read.
    # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int produces the identical dtype.
    self._object_list = [('Name', str, 300), ('Object_ID', int),
                         ('ParentID', int), ('Object_type', str, 300),
                         ('Author', str, 300), ('Version', str, 300),
                         ('Note', str, 300), ('Package_ID', int)]

    self._object_dtype = np.dtype(self._object_list)

    # Build "select t.Name, t.Object_ID, ... from t_object t"
    self._object_query = 'select'
    for datum in self._object_list:
        if self._object_query != 'select':
            self._object_query += ','
        self._object_query += ' t.%s' % datum[0]
    self._object_query += ' from t_object t'
def testSingleTableQuery(self):
    """
    Test a query on a single table (using chunk iterator)
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    expected_dtype = [('id', int), ('sqrt', float)]

    # ids run 1..200; each row's second column is sqrt(id).
    next_id = 1
    for chunk in dbobj.get_chunk_iterator('SELECT id, sqrt FROM doubleTable'):
        for row in chunk:
            self.assertEqual(row[0], next_id)
            self.assertAlmostEqual(row[1], numpy.sqrt(next_id))
            self.assertEqual(expected_dtype, row.dtype)
            next_id += 1
    self.assertEqual(next_id, 201)
def testPassingConnection(self):
    """
    Repeat the test from testJoin, but with a DBObject whose connection
    was passed directly from another DBObject, to make sure that
    passing a connection works
    """
    dbobj_base = DBObject(driver=self.driver, database=self.database)
    dbobj = DBObject(connection=dbobj_base.connection)

    query = ('SELECT doubleTable.id, intTable.id, doubleTable.log, intTable.thrice '
             'FROM doubleTable, intTable WHERE doubleTable.id = intTable.id')
    dtype = [('id', int), ('id_1', int), ('log', float), ('thrice', int)]

    # Chunked walk: every chunk before the 90th row must be full.
    rows_seen = 0
    for chunk in dbobj.get_chunk_iterator(query, chunk_size=10):
        if rows_seen < 90:
            self.assertEqual(len(chunk), 10)
        for row in chunk:
            self.assertEqual(2 * (rows_seen + 1), row[0])
            self.assertEqual(row[0], row[1])
            self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
            self.assertEqual(3 * row[0], row[3])
            self.assertEqual(dtype, row.dtype)
            rows_seen += 1
    # make sure that we found all the matches we should have
    self.assertEqual(rows_seen, 99)

    # One-shot query must agree with the chunked walk.
    results = dbobj.execute_arbitrary(query)
    self.assertEqual(dtype, results.dtype)
    rows_seen = 0
    for row in results:
        self.assertEqual(2 * (rows_seen + 1), row[0])
        self.assertEqual(row[0], row[1])
        self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
        self.assertEqual(3 * row[0], row[3])
        rows_seen += 1
    self.assertEqual(rows_seen, 99)
def testDtype(self):
    """
    Test that passing dtype to a query works

    (also test a query on a single table using .execute_arbitrary() directly)
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    query = 'SELECT id, log FROM doubleTable'
    dtype = [('id', int), ('log', float)]
    results = dbobj.execute_arbitrary(query, dtype=dtype)
    self.assertEqual(results.dtype, dtype)
    for xx in results:
        self.assertAlmostEqual(numpy.log(xx[0]), xx[1], 6)
    self.assertEqual(len(results), 200)

    results = dbobj.get_chunk_iterator(query, chunk_size=10, dtype=dtype)
    # FIX: iterator.next() only exists in Python 2; the builtin next()
    # works in Python 2.6+ and Python 3.
    next(results)
    for chunk in results:
        self.assertEqual(chunk.dtype, dtype)
def testSingleTableQuery(self):
    """
    Test a query on a single table (using chunk iterator)
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    expected_dtype = [('id', int), ('sqrt', float)]

    # ids run 1..200; each row's second column is sqrt(id).
    next_id = 1
    for chunk in dbobj.get_chunk_iterator('SELECT id, sqrt FROM doubleTable'):
        for row in chunk:
            self.assertEqual(row[0], next_id)
            self.assertAlmostEqual(row[1], numpy.sqrt(next_id))
            self.assertEqual(expected_dtype, row.dtype)
            next_id += 1
    self.assertEqual(next_id, 201)
def testDtype(self):
    """
    Test that passing dtype to a query works

    (also test a query on a single table using .execute_arbitrary() directly)
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    query = 'SELECT id, log FROM doubleTable'
    dtype = [('id', int), ('log', float)]
    results = dbobj.execute_arbitrary(query, dtype=dtype)
    self.assertEqual(results.dtype, dtype)
    for xx in results:
        self.assertAlmostEqual(numpy.log(xx[0]), xx[1], 6)
    self.assertEqual(len(results), 200)

    results = dbobj.get_chunk_iterator(query, chunk_size=10, dtype=dtype)
    # FIX: iterator.next() only exists in Python 2; the builtin next()
    # works in Python 2.6+ and Python 3.
    next(results)
    for chunk in results:
        self.assertEqual(chunk.dtype, dtype)
def testJoin(self):
    """
    Test a join
    """
    dbobj = DBObject(driver=self.driver, database=self.database)
    query = ('SELECT doubleTable.id, intTable.id, doubleTable.log, intTable.thrice '
             'FROM doubleTable, intTable WHERE doubleTable.id = intTable.id')
    dtype = [('id', int), ('id_1', int), ('log', float), ('thrice', int)]

    # Walk the join in chunks of 10; every chunk before the 90th row
    # must be full.  Matching ids are the even numbers 2, 4, 6, ...
    rows_seen = 0
    for chunk in dbobj.get_chunk_iterator(query, chunk_size=10):
        if rows_seen < 90:
            self.assertEqual(len(chunk), 10)
        for row in chunk:
            self.assertEqual(2 * (rows_seen + 1), row[0])
            self.assertEqual(row[0], row[1])
            self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
            self.assertEqual(3 * row[0], row[3])
            self.assertEqual(dtype, row.dtype)
            rows_seen += 1
    # make sure that we found all the matches we should have
    self.assertEqual(rows_seen, 99)

    # The same query run in one shot must agree with the chunked walk.
    results = dbobj.execute_arbitrary(query)
    self.assertEqual(dtype, results.dtype)
    rows_seen = 0
    for row in results:
        self.assertEqual(2 * (rows_seen + 1), row[0])
        self.assertEqual(row[0], row[1])
        self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
        self.assertEqual(3 * row[0], row[3])
        rows_seen += 1
    self.assertEqual(rows_seen, 99)
def testPassingConnection(self):
    """
    Repeat the test from testJoin, but with a DBObject whose connection
    was passed directly from another DBObject, to make sure that
    passing a connection works
    """
    dbobj_base = DBObject(driver=self.driver, database=self.database)
    dbobj = DBObject(connection=dbobj_base.connection)

    query = ('SELECT doubleTable.id, intTable.id, doubleTable.log, intTable.thrice '
             'FROM doubleTable, intTable WHERE doubleTable.id = intTable.id')
    dtype = [('id', int), ('id_1', int), ('log', float), ('thrice', int)]

    # Chunked walk: every chunk before the 90th row must be full.
    rows_seen = 0
    for chunk in dbobj.get_chunk_iterator(query, chunk_size=10):
        if rows_seen < 90:
            self.assertEqual(len(chunk), 10)
        for row in chunk:
            self.assertEqual(2 * (rows_seen + 1), row[0])
            self.assertEqual(row[0], row[1])
            self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
            self.assertEqual(3 * row[0], row[3])
            self.assertEqual(dtype, row.dtype)
            rows_seen += 1
    # make sure that we found all the matches we should have
    self.assertEqual(rows_seen, 99)

    # One-shot query must agree with the chunked walk.
    results = dbobj.execute_arbitrary(query)
    self.assertEqual(dtype, results.dtype)
    rows_seen = 0
    for row in results:
        self.assertEqual(2 * (rows_seen + 1), row[0])
        self.assertEqual(row[0], row[1])
        self.assertAlmostEqual(numpy.log(row[0]), row[2], 6)
        self.assertEqual(3 * row[0], row[3])
        rows_seen += 1
    self.assertEqual(rows_seen, 99)
def catsim_query(objid, constraint, catalog, radius, opsim_metadata):
    """
    Query catsim and make a catalog

    Returns a (catalog, obs_metadata) pair built from the given opsim
    pointing record.
    """
    # opsim_metadata stores the pointing in radians at indices 1 and 2
    # and the MJD at index 5; convert angles to degrees for
    # ObservationMetaData.
    ra_degrees = opsim_metadata[1] * 180 / pi
    dec_degrees = opsim_metadata[2] * 180 / pi
    obs_metadata = ObservationMetaData(
        circ_bounds=dict(ra=ra_degrees, dec=dec_degrees, radius=radius),
        mjd=opsim_metadata[5])

    dbobj = DBObject.from_objid(objid)
    t = dbobj.getCatalog(catalog, obs_metadata=obs_metadata,
                         constraint=constraint)
    return t, obs_metadata
def testValidationErrors(self):
    """
    Test that appropriate errors and warnings are thrown when connecting
    """
    # Passing a full sqlite connection string should warn exactly once.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        DBObject('sqlite:///' + self.database)
        # FIX: a bare `assert` is stripped when Python runs with -O;
        # use the unittest assertion (which also reports better).
        self.assertEqual(len(w), 1)

    #missing database
    self.assertRaises(AttributeError, DBObject, driver=self.driver)
    #missing driver
    self.assertRaises(AttributeError, DBObject, database=self.database)
    #missing host
    self.assertRaises(AttributeError, DBObject, driver='mssql+pymssql')
    #missing port
    self.assertRaises(AttributeError, DBObject, driver='mssql+pymssql', host='localhost')
import time

if __name__ == "__main__":
    # Destination directory for the magnitude comparison output.
    output_dir = "magnitude_out_dir"
    monet_root_dir = os.path.join("/astro", "store", "pogo1", "monetBrightStars")

    # The Monet bright-star data lives in six Disc_0 .. Disc_5 directories.
    monet_dir_list = []
    for ii in range(6):
        name = os.path.join(monet_root_dir, "Disc_%d" % ii)
        monet_dir_list.append(name)

    # Connection to the LSST catalog-simulation database.
    db = DBObject(database='LSSTCATSIM', host='fatboy.phys.washington.edu',
                  port=1433, driver='mssql+pymssql')

    # Row layout of the Monet input files: id, position, proper motion,
    # then per-band magnitudes and a flag column.
    # NOTE(review): `long` is Python 2 only; this script predates Python 3.
    data_dtype = np.dtype([('id', long), ('ra', float), ('dec', float),
                           ('mura', float), ('mudec', float), ('b', float),
                           ('v', float), ('u', float), ('g', float),
                           ('r', float), ('i', float), ('z', float),
                           ('y', float), ('j', float), ('h', float),
                           ('k', float), ('w1', float), ('w2', float),
                           ('w3', float), ('w4', float), ('sst', float),
                           ('flag', long)])

    # Layout of the rows expected back from the database query.
    query_dtype = np.dtype([('id', long), ('u', float), ('g', float),
                            ('r', float), ('i', float), ('z', float),
                            ('y', float), ('residual', float)])
# NOTE(review): the attribute and two getters below read like the body of a
# catalog class whose `class` statement is outside this chunk (they take
# `self` and use self.column_by_name / self.specFileMap); confirm the
# enclosing class before relying on this layout.

# Output transformation: convert the stored radian columns to degrees.
transformations = {'raJ2000':numpy.degrees, 'decJ2000':numpy.degrees}

def get_objectId(self):
    # The object id is simply the reference-id column of the database object.
    return self.column_by_name(self.refIdCol)

@compound('sedPathBulge', 'sedPathDisk', 'sedPathAgn')
def get_sedFilepath(self):
    # Map each SED file name through self.specFileMap; the literal string
    # 'None' marks a missing component and is carried through as None.
    return (numpy.array([None if k == 'None' else self.specFileMap[k]
                         for k in self.column_by_name('sedFilenameBulge')],
                        dtype=(str, 64)),
            numpy.array([None if k == 'None' else self.specFileMap[k]
                         for k in self.column_by_name('sedFilenameDisk')],
                        dtype=(str, 64)),
            numpy.array([None if k == 'None' else self.specFileMap[k]
                         for k in self.column_by_name('sedFilenameAgn')],
                        dtype=(str, 64)))

# Script: write a galaxy catalog for a 0.05-degree-radius field at
# (ra, dec) = (0, 0), keeping galaxies with i_ab < 25.3.
obs_metadata = ObservationMetaData(circ_bounds=dict(ra=0.0, dec=0.0, radius=0.05))
dbobj = DBObject.from_objid('galaxyTiled')
constraint = "i_ab < 25.3"
filetype = 'galaxy_catalog'
print "Getting galaxy catalog"  # Python 2 print statement
cat = dbobj.getCatalog(filetype, obs_metadata=obs_metadata, constraint=constraint)
filename = "catalog.dat"
print "Writing galaxy catalog"
cat.write_catalog(filename, chunk_size=100000)
class EADBWrapper(object):
    """
    Wrapper around the 'sysarch' Enterprise Architect database that can
    look up SysML objects in the t_object table by name and walk their
    family (parent/daughter) trees.
    """

    def __init__(self):
        self._dbo = DBObject(database='sysarch', host='terminal.lsst.org',
                             port='3306', driver='mysql')

        # (name, type[, width]) specs for the t_object columns we read.
        # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int produces the identical dtype.
        self._object_list = [('Name', str, 300), ('Object_ID', int),
                             ('ParentID', int), ('Object_type', str, 300),
                             ('Author', str, 300), ('Version', str, 300),
                             ('Note', str, 300), ('Package_ID', int)]

        self._object_dtype = np.dtype(self._object_list)

        # Build "select t.Name, t.Object_ID, ... from t_object t"
        self._object_query = 'select'
        for datum in self._object_list:
            if self._object_query != 'select':
                self._object_query += ','
            self._object_query += ' t.%s' % datum[0]
        self._object_query += ' from t_object t'

    def objectIdFromName(self, name):
        """
        Return the Object_ID(s) of the object(s) named <name>.
        """
        dtype = np.dtype([('name', str, 300), ('ObjectID', int)])
        query = "select t.name, t.Object_ID from t_object t where t.name = '%s'" % name
        results = self._dbo.execute_arbitrary(query, dtype=dtype)
        return results['ObjectID']

    def _getDaughtersFromObjID(self, objid):
        """
        Recursively return a list of Object_IDs corresponding to the objects
        descended (either directly or indirectly) from an object specified
        by an Object_ID
        """
        query = self._object_query + " where t.ParentID=%d" % objid
        results = self._dbo.execute_arbitrary(query, dtype=self._object_dtype)
        ans = list(results["Object_ID"])
        for aa in results["Object_ID"]:
            new_results = self._getDaughtersFromObjID(aa)
            ans += new_results
        return ans

    def writeFamilyTree(self, name, author=None, version=None, file_handle=sys.stdout):
        """
        Write out all of the objects descended from <name>

        @param [in] name is a string denoting the name of the desired object

        @param [in] author is an optional string denoting the author of the
        desired object (in case there are multiple versions)

        @param [in] version is an optional string denoting the version of the
        desired object (in case there are multiple versions)

        @param [in] file_handle points to the output file (default is stdout)

        This method writes its results to the destination specified by
        file_handle
        """
        objIdList = self.getFamilyIDs(name=name, author=author, version=version)
        objList = SysMLObjectList(self, objIdList)
        objList.writeFamilyTree(file_handle=file_handle)

    def writeFamilyTreeFromID(self, objid, file_handle=sys.stdout):
        """
        Same as writeFamilyTree, but starting from a known Object_ID.
        """
        objIdList = self.getFamilyIDs(objid=objid)
        objList = SysMLObjectList(self, objIdList)
        objList.writeFamilyTree(file_handle=file_handle)

    def getFamilyIDs(self, name=None, author=None, version=None, objid=None):
        """
        Get a list of the Object_IDs of all objects descended (directly or
        indirectly) from a specified object

        @param [in] name is a string denoting the name of the desired object

        @param [in] author is an optional string denoting the author of the
        desired object (in case there are multiple versions)

        @param [in] version is an optional string denoting the version of the
        desired object (in case there are multiple versions)

        @param [in] objid is an optional integer denoting the Object_ID of
        the object whose tree you want to write out (if you already know it)

        @param [out] a list of ints denoting the Object_IDs of all of the
        objects beneath the desired object in its family tree.
        """
        if objid is None:
            query = self._object_query + " where t.name='%s'" % name
            if author is not None:
                query += " and t.Author='%s'" % author
            if version is not None:
                query += " and t.Version='%s'" % version
        else:
            query = self._object_query + " where t.Object_ID=%d" % objid

        results = self._dbo.execute_arbitrary(query, dtype=self._object_dtype)

        if len(results) > 1:
            raise RuntimeError('More than one object match the name you gave. '
                               'Try specifying an author or a version')
        if len(results) == 0:
            raise RuntimeError('No objects matched the name you gave.')

        return list(results["Object_ID"]) + self._getDaughtersFromObjID(results["Object_ID"][0])
import matplotlib.pyplot as plt
import time

if __name__ == "__main__":
    # Destination directory for the magnitude comparison output.
    output_dir = "magnitude_out_dir"
    monet_root_dir = os.path.join("/astro", "store", "pogo1", "monetBrightStars")

    # The Monet bright-star data lives in six Disc_0 .. Disc_5 directories.
    monet_dir_list = []
    for ii in range(6):
        name = os.path.join(monet_root_dir, "Disc_%d" % ii)
        monet_dir_list.append(name)

    # Connection to the LSST catalog-simulation database.
    db = DBObject(database='LSSTCATSIM', host='fatboy.phys.washington.edu',
                  port=1433, driver='mssql+pymssql')

    # Row layout of the Monet input files: id, position, proper motion,
    # then per-band magnitudes and a flag column.
    # NOTE(review): `long` is Python 2 only; this script predates Python 3.
    data_dtype = np.dtype([('id', long), ('ra', float), ('dec', float),
                           ('mura', float), ('mudec', float), ('b', float),
                           ('v', float), ('u', float), ('g', float),
                           ('r', float), ('i', float), ('z', float),
                           ('y', float), ('j', float), ('h', float),
                           ('k', float), ('w1', float), ('w2', float),
                           ('w3', float), ('w4', float), ('sst', float),
                           ('flag', long)])
class ObservationMetaDataGenerator(object):
    """
    This is a class that allows the user to query an opsim output database
    for ObservationMetaData instantiations that fit certain criteria.

    The major method is ObservationMetaDataGenerator.getObservationMetaData()
    which accepts bounds on columns of the opsim summary table and returns
    a list of ObservationMetaData instantiations that fall within those
    bounds.
    """

    def _put_quotations(self, val):
        """
        This formats the user's input of telescopeFilter; it must be enclosed
        in single quotation marks.  This method adds them if necessary.

        @param [in] val is a string (denoting a Telescope Filter)

        @param [out] a string containing 'val' (i.e. the input value enclosed
        in single quotation marks, if they are not already there)
        """
        if val[0]=='\'':
            return val
        else:
            return "'%s'" % val

    def __init__(self, driver=None, host=None, port=None, database=None):
        """
        @param [in] name of opsim database driver (e.g. 'sqlite', 'mssql+pymssql')
        @param [in] hostname of opsim database for the opsim db to be queried
        @param [in] port of opsim database
        @param [in] database name.  If None, will default to
        opsimblitz1_1133_sqlite.db stored in sims_data/OpSimData/
        """
        if database is None:
            # Fall back to the sqlite database shipped with sims_data.
            dbPath = os.path.join(lsst.utils.getPackageDir('sims_data'),'OpSimData/')
            self.database = os.path.join(dbPath, 'opsimblitz1_1133_sqlite.db')
            self.driver = 'sqlite'
            self.host = None
            self.port = None
        else:
            self.driver = driver
            self.database = database
            self.host = host
            self.port = port

        self.opsimdb = DBObject(driver=self.driver, database=self.database,
                                host=self.host, port=self.port)

        #27 January 2015
        #self.columnMapping is a list of tuples.  Each tuple corresponds to a
        #column in the opsim database's summary table.
        #
        #The 0th element of the tuple is how users will refer to the OpSim
        #summary table columns (i.e. how they are called in
        #getObservationMetaData).
        #
        #The 1st element of the tuple is how the column is named in the OpSim db.
        #
        #The 2nd element of the tuple is how PhoSim refers to the quantity
        #(as of OpSim3_61DBObject.py); this is None if PhoSim does not expect
        #the quantity.
        #
        #The 3rd element of the tuple is the datatype of the column.
        #
        #The 4th element of the tuple is any coordinate transformation
        #required to go from the user interface to the OpSim database
        #(i.e. OpSim stores all angles in radians; we would like users to be
        #able to specify angles in degrees).
        #
        #Note that this conforms to an older PhoSim API.  At some time in the
        #future, both this and OpSim3_61DBObject.py (and possibly
        #phoSimCatalogExamples.py) will need to be updated to reflect what
        #PhoSim actually expects now.
        #
        self.columnMapping = [('obsHistID', 'obsHistID', 'Opsim_obshistid' ,numpy.int64, None),
                              ('expDate', 'expDate', 'SIM_SEED', int, None),
                              ('fieldRA', 'fieldRA', 'Unrefracted_RA', float, numpy.radians),
                              ('fieldDec', 'fieldDec', 'Unrefracted_Dec', float, numpy.radians),
                              ('moonRA', 'moonRA', 'Opsim_moonra', float, numpy.radians),
                              ('moonDec', 'moonDec', 'Opsim_moondec', float, numpy.radians),
                              ('rotSkyPos', 'rotSkyPos', 'Opsim_rotskypos', float, numpy.radians),
                              ('telescopeFilter', 'filter', 'Opsim_filter', (str,1), self._put_quotations),
                              ('rawSeeing', 'rawSeeing', 'Opsim_rawseeing', float, None),
                              ('seeing', 'finSeeing', None, float, None),
                              ('sunAlt', 'sunAlt', 'Opsim_sunalt', float, numpy.radians),
                              ('moonAlt', 'moonAlt', 'Opsim_moonalt', float, numpy.radians),
                              ('dist2Moon', 'dist2Moon', 'Opsim_dist2moon', float, numpy.radians),
                              ('moonPhase', 'moonPhase', 'Opsim_moonphase', float, None),
                              ('expMJD', 'expMJD', 'Opsim_expmjd', float, None),
                              ('altitude', 'altitude', 'Opsim_altitude', float, numpy.radians),
                              ('azimuth', 'azimuth', 'Opsim_azimuth', float, numpy.radians),
                              ('visitExpTime', 'visitExpTime', 'exptime', float, None),
                              ('airmass', 'airmass', 'airmass', float, None),
                              ('m5', 'fiveSigmaDepth', None, float, None),
                              ('skyBrightness', 'filtSkyBrightness', None, float, None)]

        #Set up self.dtype containing the dtype of the recarray we expect
        #back from the SQL query.
        #Also set up baseQuery, which is just the SELECT clause of the SQL
        #query.
        dtypeList = []
        self.baseQuery = 'SELECT'
        for column in self.columnMapping:
            dtypeList.append((column[1],column[3]))
            if self.baseQuery != 'SELECT':
                self.baseQuery += ','
            self.baseQuery += ' ' + column[1]

        self.dtype = numpy.dtype(dtypeList)

    def getObservationMetaData(self, obsHistID=None, expDate=None, fieldRA=None, fieldDec=None,
                               moonRA=None, moonDec=None, rotSkyPos=None, telescopeFilter=None,
                               rawSeeing=None, seeing=None, sunAlt=None, moonAlt=None,
                               dist2Moon=None, moonPhase=None, expMJD=None, altitude=None,
                               azimuth=None, visitExpTime=None, airmass=None, skyBrightness=None,
                               m5=None, boundType='circle', boundLength=0.1, limit=None):
        """
        This method will query the OpSim database summary table according to
        user-specified constraints and return a list of ObservationMetaData
        instantiations consistent with those constraints.

        @param [in] limit is an integer denoting the maximum number of
        ObservationMetaData to be returned

        @param [in] boundType is the boundType of the ObservationMetaData to
        be returned (see documentation in
        sims_catalogs_generation/../db/spatialBounds.py for more details)

        @param [in] boundLength is the boundLength of the ObservationMetaData
        to be returned (in degrees; see documentation in
        sims_catalogs_generation/../db/spatialBounds.py for more details)

        All other input parameters are constraints to be placed on the SQL
        query of the opsim output db.  These constraints can either be tuples
        of the form (min, max) or an exact value the user wants returned.

        Parameters that can be constrained are:

        @param [in] fieldRA in degrees
        @param [in] fieldDec in degrees
        @param [in] altitude in degrees
        @param [in] azimuth in degrees

        @param [in] moonRA in degrees
        @param [in] moonDec in degrees
        @param [in] moonAlt in degrees
        @param [in] moonPhase (a value from 1 to 100 indicating how much of
        the moon is illuminated)
        @param [in] dist2Moon the distance between the telescope pointing and
        the moon in degrees

        @param [in] sunAlt in degrees

        @param [in] rotSkyPos (the angle of the sky with respect to the
        camera coordinate system) in degrees
        @param [in] telescopeFilter a string that is one of u,g,r,i,z,y

        @param [in] airmass
        @param [in] rawSeeing (this is an idealized seeing at zenith at
        500nm in arcseconds)
        @param [in] seeing (this is the OpSim column 'finSeeing' in
        arcseconds)

        @param [in] visitExpTime the exposure time in seconds

        @param [in] obsHistID the integer used by OpSim to label pointings
        @param [in] expDate is the date of the exposure (units????)
        @param [in] expMJD is the MJD of the exposure
        @param [in] m5 is the five sigma depth of the observation
        @param [in] skyBrightness
        """
        query = self.baseQuery+ ' FROM SUMMARY'

        nConstraints = 0 #the number of constraints in this query

        for column in self.columnMapping:
            # NOTE: eval looks up the keyword argument whose name is
            # column[0] -- the user-facing names in columnMapping
            # deliberately match this method's parameter names.
            value = eval(column[0])
            if value is not None:
                if nConstraints > 0:
                    query += ' AND'
                else:
                    query += ' WHERE '

                if isinstance(value,tuple):
                    if len(value)>2:
                        raise RuntimeError('Cannot pass a tuple longer than 2 elements '+
                                           'to getObservationMetaData: %s is len %d' % (column[0], len(value)))

                    #perform any necessary coordinate transformations
                    if column[4] is not None:
                        vmin = column[4](value[0])
                        vmax = column[4](value[1])
                    else:
                        vmin = value[0]
                        vmax = value[1]

                    query += ' %s > %s AND %s < %s' % \
                             (column[1], vmin, column[1], vmax)
                else:
                    #perform any necessary coordinate transformations
                    if column[4] is not None:
                        vv = column[4](value)
                    else:
                        vv = value
                    query += ' %s == %s' % (column[1], vv)

                nConstraints += 1

        if limit is not None:
            query += ' LIMIT %d' % limit

        if nConstraints==0 and limit is None:
            raise RuntimeError('You did not specify any contraints on your query;' +
                               ' you will just return ObservationMetaData for all poitnings')

        results = self.opsimdb.execute_arbitrary(query, dtype=self.dtype)

        #convert the results into ObservationMetaData instantiations
        obs_output = [ObservationMetaData(m5=pointing['fiveSigmaDepth'], boundType=boundType,
                                          boundLength=boundLength,
                                          skyBrightness=pointing['filtSkyBrightness'],
                                          seeing=pointing['finSeeing'],
                                          phoSimMetaData=OrderedDict([(column[2],
                                                                       (pointing[column[1]],
                                                                        pointing[column[1]].dtype))
                                                                      for column in self.columnMapping
                                                                      if column[2] is not None]))
                      for pointing in results]

        return obs_output
def __init__(self, database, host, port, driver):
    """
    Open the underlying DBObject connection.

    @param [in] database, host, port, driver are forwarded to DBObject
    """
    self._dbo = DBObject(driver=driver, database=database,
                         host=host, port=port)
import time

if __name__ == "__main__":
    # grid step size in magnitude and declination
    # can be set to whatever the user wants
    dmag = 0.1
    ddec = 0.1

    t_start = time.time()
    output_file_name = "mag_vs_dec.eps"

    # Connection to a local copy of the LSST catalog-simulation database.
    db = DBObject(database='LSSTCATSIM', host='localhost', port=51433,
                  driver='mssql+pymssql')

    # Pull no-atmosphere magnitudes for bright stars in 0 < ra < 20 deg.
    cmd = "SELECT catalogid, ra, decl, umag_noatm, gmag_noatm, rmag_noatm, "
    cmd += "imag_noatm, zmag_noatm, ymag_noatm, residual "
    cmd += "FROM bright_stars WHERE ra < 20.0 and ra > 0.0"

    # Layout of the rows expected back from the query above.
    # NOTE(review): `long` is Python 2 only; this script predates Python 3.
    query_dtype = np.dtype([('id', long), ('ra', float), ('dec', float),
                            ('u', float), ('g', float), ('r', float),
                            ('i', float), ('z', float), ('y', float),
                            ('residual', float)])

    # Per-band accumulation grids keyed by (magnitude, declination) cell.
    u_grid = {}
    g_grid = {}
    r_grid = {}
    i_grid = {}
def __init__(self, driver=None, host=None, port=None, database=None):
    """
    @param [in] name of opsim database driver (e.g. 'sqlite', 'mssql+pymssql')
    @param [in] hostname of opsim database for the opsim db to be queried
    @param [in] port of opsim database
    @param [in] database name.  If None, will default to
    opsimblitz1_1133_sqlite.db stored in sims_data/OpSimData/
    """
    if database is None:
        # Fall back to the sqlite database shipped with sims_data.
        dbPath = os.path.join(lsst.utils.getPackageDir('sims_data'),'OpSimData/')
        self.database = os.path.join(dbPath, 'opsimblitz1_1133_sqlite.db')
        self.driver = 'sqlite'
        self.host = None
        self.port = None
    else:
        self.driver = driver
        self.database = database
        self.host = host
        self.port = port

    self.opsimdb = DBObject(driver=self.driver, database=self.database,
                            host=self.host, port=self.port)

    #27 January 2015
    #self.columnMapping is a list of tuples.  Each tuple corresponds to a
    #column in the opsim database's summary table.
    #
    #The 0th element of the tuple is how users will refer to the OpSim
    #summary table columns (i.e. how they are called in
    #getObservationMetaData).
    #
    #The 1st element of the tuple is how the column is named in the OpSim db.
    #
    #The 2nd element of the tuple is how PhoSim refers to the quantity
    #(as of OpSim3_61DBObject.py); this is None if PhoSim does not expect
    #the quantity.
    #
    #The 3rd element of the tuple is the datatype of the column.
    #
    #The 4th element of the tuple is any coordinate transformation required
    #to go from the user interface to the OpSim database (i.e. OpSim stores
    #all angles in radians; we would like users to be able to specify angles
    #in degrees).
    #
    #Note that this conforms to an older PhoSim API.  At some time in the
    #future, both this and OpSim3_61DBObject.py (and possibly
    #phoSimCatalogExamples.py) will need to be updated to reflect what
    #PhoSim actually expects now.
    #
    self.columnMapping = [('obsHistID', 'obsHistID', 'Opsim_obshistid' ,numpy.int64, None),
                          ('expDate', 'expDate', 'SIM_SEED', int, None),
                          ('fieldRA', 'fieldRA', 'Unrefracted_RA', float, numpy.radians),
                          ('fieldDec', 'fieldDec', 'Unrefracted_Dec', float, numpy.radians),
                          ('moonRA', 'moonRA', 'Opsim_moonra', float, numpy.radians),
                          ('moonDec', 'moonDec', 'Opsim_moondec', float, numpy.radians),
                          ('rotSkyPos', 'rotSkyPos', 'Opsim_rotskypos', float, numpy.radians),
                          ('telescopeFilter', 'filter', 'Opsim_filter', (str,1), self._put_quotations),
                          ('rawSeeing', 'rawSeeing', 'Opsim_rawseeing', float, None),
                          ('seeing', 'finSeeing', None, float, None),
                          ('sunAlt', 'sunAlt', 'Opsim_sunalt', float, numpy.radians),
                          ('moonAlt', 'moonAlt', 'Opsim_moonalt', float, numpy.radians),
                          ('dist2Moon', 'dist2Moon', 'Opsim_dist2moon', float, numpy.radians),
                          ('moonPhase', 'moonPhase', 'Opsim_moonphase', float, None),
                          ('expMJD', 'expMJD', 'Opsim_expmjd', float, None),
                          ('altitude', 'altitude', 'Opsim_altitude', float, numpy.radians),
                          ('azimuth', 'azimuth', 'Opsim_azimuth', float, numpy.radians),
                          ('visitExpTime', 'visitExpTime', 'exptime', float, None),
                          ('airmass', 'airmass', 'airmass', float, None),
                          ('m5', 'fiveSigmaDepth', None, float, None),
                          ('skyBrightness', 'filtSkyBrightness', None, float, None)]

    #Set up self.dtype containing the dtype of the recarray we expect back
    #from the SQL query.
    #Also set up baseQuery, which is just the SELECT clause of the SQL query.
    dtypeList = []
    self.baseQuery = 'SELECT'
    for column in self.columnMapping:
        dtypeList.append((column[1],column[3]))
        if self.baseQuery != 'SELECT':
            self.baseQuery += ','
        self.baseQuery += ' ' + column[1]

    self.dtype = numpy.dtype(dtypeList)