def setUpClass(cls):
    """Write a small 4-column text catalog into a scratch directory and
    ingest it into a sqlite database with fileDBObject."""
    cls.scratch_dir = tempfile.mkdtemp(dir=ROOT, prefix="scratchSpace-")
    cls.db_src_name = os.path.join(cls.scratch_dir, 'compound_cat_filter_db.txt')
    cls.db_name = os.path.join(cls.scratch_dir, 'compound_cat_filter_db.db')
    # make sure stale artifacts from a previous run do not survive
    for stale_path in (cls.db_src_name, cls.db_name):
        if os.path.exists(stale_path):
            os.unlink(stale_path)
    with open(cls.db_src_name, 'w') as src_file:
        src_file.write('#a header\n')
        # ten rows whose columns are id, id+1, id+2, id+3
        for row in range(10):
            src_file.write('%d %d %d %d\n' % (row, row + 1, row + 2, row + 3))
    table_dtype = np.dtype([('id', int), ('ip1', int),
                            ('ip2', int), ('ip3', int)])
    fileDBObject(cls.db_src_name, runtable='test', dtype=table_dtype,
                 idColKey='id', database=cls.db_name)
def setUpClass(cls):
    """Write star and galaxy text catalogs to a scratch directory and
    ingest both into one sqlite database ('stars' and 'galaxies' tables)."""
    cls.scratch_dir = tempfile.mkdtemp(dir=ROOT, prefix="scratchSpace-")
    cls.write_star_txt()
    cls.write_galaxy_txt()
    cls.dbName = os.path.join(cls.scratch_dir, 'ConnectionPassingTestDB.db')
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    # np.int/np.float were deprecated in NumPy 1.20 and removed in 1.24;
    # the Python builtins are what those aliases always meant.
    galDtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                         ('redshift', float), ('umag', float), ('gmag', float)])
    starDtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                          ('umag', float), ('gmag', float)])
    fileDBObject(cls.star_txt_name, database=cls.dbName, driver='sqlite',
                 runtable='stars', idColKey='id', dtype=starDtype)
    fileDBObject(cls.gal_txt_name, database=cls.dbName, driver='sqlite',
                 runtable='galaxies', idColKey='id', dtype=galDtype)
def setUpClass(cls):
    """Build a fileDBObject from the SSM photometry test data and write a
    baseline catalog whose rows become the control data for the tests."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[22.0, 23.0, 24.0, 25.0, 26.0, 27.0])
    # np.float/np.int were removed in NumPy 1.24; use the builtins instead
    baselineDtype = np.dtype([(name, float)
                              for name in baselineSSMCatalog.column_outputs])
    dbdtype = np.dtype([('id', int), ('sedFilename', str, 100),
                        ('magNorm', float), ('velRA', float),
                        ('velDec', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'SSMphotometryCatalog.txt')
    cls.db = fileDBObject(inputFile, runtable='test', idColKey='id',
                          dtype=dbdtype)
    cat = baselineSSMCatalog(cls.db, obs_metadata=cls.obs)
    catName = tempfile.mktemp(prefix='IndexTestCase_setUpClass')
    cat.write_catalog(catName)
    cls.controlData = np.genfromtxt(catName, dtype=baselineDtype, delimiter=',')
    os.unlink(catName)
def setUpClass(cls):
    """Build a fileDBObject from the star index test data and write a
    baseline star catalog whose rows become the control data."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[22.0, 23.0, 24.0, 25.0, 26.0, 27.0])
    # np.float/np.int were removed in NumPy 1.24; use the builtins instead
    baselineDtype = np.dtype([(name, float)
                              for name in baselineStarCatalog.column_outputs])
    dbdtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                        ('sedFilename', str, 100), ('magNorm', float),
                        ('galacticAv', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    cls.db = fileDBObject(inputFile, runtable='test', idColKey='id',
                          dtype=dbdtype)
    cat = baselineStarCatalog(cls.db, obs_metadata=cls.obs)
    cls.catName = tempfile.mktemp(dir=ROOT, prefix='indicesStarControlCat-',
                                  suffix='.txt')
    cat.write_catalog(cls.catName)
    cls.controlData = np.genfromtxt(cls.catName, dtype=baselineDtype,
                                    delimiter=',')
    os.unlink(cls.catName)
def setUpClass(cls):
    """Generate a random catalog of supernova positions/redshifts, write it
    to a scratch file, and ingest it into a fileDBObject for the tests."""
    rng = np.random.RandomState(99)
    n_sne = 100
    ra_list = rng.random_sample(n_sne) * 7.0 + 78.0
    dec_list = rng.random_sample(n_sne) * 4.0 - 69.0
    zz_list = rng.random_sample(n_sne) * 1.0 + 0.05
    cls.scratchDir = tempfile.mkdtemp(dir=ROOT, prefix='scratchSpace-')
    cls.input_cat_name = os.path.join(cls.scratchDir, "sne_input_cat.txt")
    with open(cls.input_cat_name, "w") as output_file:
        # columns: id; RA/Dec in degrees; RA/Dec in radians; redshift
        for ix in range(n_sne):
            output_file.write("%d;%.12f;%.12f;%.12f;%.12f;%.12f\n"
                              % (ix + 1, ra_list[ix], dec_list[ix],
                                 np.radians(ra_list[ix]),
                                 np.radians(dec_list[ix]), zz_list[ix]))
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    dtype = np.dtype([('id', int), ('raDeg', float), ('decDeg', float),
                      ('raJ2000', float), ('decJ2000', float),
                      ('redshift', float)])
    cls.db = fileDBObject(cls.input_cat_name, delimiter=';', runtable='test',
                          dtype=dtype, idColKey='id')
    cls.db.raColName = 'raDeg'
    cls.db.decColName = 'decDeg'
    cls.db.objectTypeId = 873
    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")
def setUpClass(cls):
    """Build a fileDBObject from the SSM photometry test data and write a
    baseline catalog whose rows become the control data for the tests."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[22.0, 23.0, 24.0, 25.0, 26.0, 27.0])
    # np.float/np.int were removed in NumPy 1.24; use the builtins instead
    baselineDtype = np.dtype([(name, float)
                              for name in baselineSSMCatalog.column_outputs])
    dbdtype = np.dtype([('id', int), ('sedFilename', str, 100),
                        ('magNorm', float), ('velRA', float),
                        ('velDec', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'SSMphotometryCatalog.txt')
    cls.db = fileDBObject(inputFile, runtable='test', idColKey='id',
                          dtype=dbdtype)
    cat = baselineSSMCatalog(cls.db, obs_metadata=cls.obs)
    catName = tempfile.mktemp(prefix='IndexTestCase_setUpClass')
    cat.write_catalog(catName)
    cls.controlData = np.genfromtxt(catName, dtype=baselineDtype, delimiter=',')
    os.unlink(catName)
def setUpClass(cls):
    """Build a fileDBObject from the galaxy index test data and write a
    baseline galaxy catalog whose rows become the control data."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[24.0, 25.0, 26.0, 27.0, 28.0, 29.0])
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    dtype = np.dtype([('id', int),
                      ('sedFilenameBulge', str, 100), ('magNormBulge', float),
                      ('sedFilenameDisk', str, 100), ('magNormDisk', float),
                      ('sedFilenameAgn', str, 100), ('magNormAgn', float),
                      ('internalAvBulge', float), ('internalAvDisk', float),
                      ('galacticAv', float), ('redshift', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogGalaxies.txt')
    cls.db = fileDBObject(inputFile, dtype=dtype, runtable='test',
                          idColKey='id')
    cls.db.objectTypeId = 44
    cat = baselineGalaxyCatalog(cls.db, obs_metadata=cls.obs)
    # all catalog outputs are floats in the control data
    dtype = np.dtype([(name, float) for name in cat.column_outputs])
    catName = tempfile.mktemp(dir=ROOT, prefix='', suffix='.txt')
    cat.write_catalog(catName)
    cls.controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')
    os.remove(catName)
def setUpClass(cls):
    """Build a fileDBObject from the star index test data and write a
    baseline star catalog whose rows become the control data."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[22.0, 23.0, 24.0, 25.0, 26.0, 27.0])
    # np.float/np.int were removed in NumPy 1.24; use the builtins instead
    baselineDtype = np.dtype([(name, float)
                              for name in baselineStarCatalog.column_outputs])
    dbdtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                        ('sedFilename', str, 100), ('magNorm', float),
                        ('galacticAv', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    cls.db = fileDBObject(inputFile, runtable='test', idColKey='id',
                          dtype=dbdtype)
    cat = baselineStarCatalog(cls.db, obs_metadata=cls.obs)
    cls.catName = tempfile.mktemp(dir=ROOT, prefix='indicesStarControlCat-',
                                  suffix='.txt')
    cat.write_catalog(cls.catName)
    cls.controlData = np.genfromtxt(cls.catName, dtype=baselineDtype,
                                    delimiter=',')
    os.unlink(cls.catName)
def setUpClass(cls):
    """Build a fileDBObject from the galaxy index test data and write a
    baseline galaxy catalog whose rows become the control data."""
    cls.obs = ObservationMetaData(bandpassName=['u', 'g', 'r', 'i', 'z', 'y'],
                                  m5=[24.0, 25.0, 26.0, 27.0, 28.0, 29.0])
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    dtype = np.dtype([('id', int),
                      ('sedFilenameBulge', str, 100), ('magNormBulge', float),
                      ('sedFilenameDisk', str, 100), ('magNormDisk', float),
                      ('sedFilenameAgn', str, 100), ('magNormAgn', float),
                      ('internalAvBulge', float), ('internalAvDisk', float),
                      ('galacticAv', float), ('redshift', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogGalaxies.txt')
    cls.db = fileDBObject(inputFile, dtype=dtype, runtable='test',
                          idColKey='id')
    cls.db.objectTypeId = 44
    cat = baselineGalaxyCatalog(cls.db, obs_metadata=cls.obs)
    # all catalog outputs are floats in the control data
    dtype = np.dtype([(name, float) for name in cat.column_outputs])
    catName = tempfile.mktemp(dir=ROOT, prefix='', suffix='.txt')
    cat.write_catalog(catName)
    cls.controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')
    os.remove(catName)
def create_star_cache(db=None):
    """
    Read the stars from a database and cache them in
    $TWINKLES_DIR/data/star_cache.db in the table 'star_cache_table'

    Params
    ------
    db is a CatalogDBObject connecting to the database from which
    to read the data.  If None, this will use the CatSim class StarObj(),
    which connects to the star table on fatboy.
    """
    star_dtype = np.dtype([('simobjid', int), ('ra', float), ('decl', float),
                           ('magNorm', float), ('mura', float),
                           ('mudecl', float), ('parallax', float),
                           ('ebv', float), ('vrad', float),
                           ('varParamStr', str, 256), ('sedfilename', str, 40),
                           ('gmag', float)])
    col_names = list(star_dtype.names)
    star_cache_name = os.path.join(getPackageDir('twinkles'), 'data',
                                   'twinkles_star_cache.txt')
    star_db_name = os.path.join(getPackageDir('twinkles'), 'data',
                                'star_cache.db')
    if db is None:
        db = StarObj()
    result_iterator = db.query_columns(colnames=col_names, chunk_size=100000,
                                       obs_metadata=_obs)
    with open(star_cache_name, 'w') as output_file:
        # space-separated header naming the cached columns
        output_file.write('# ')
        for name in col_names:
            output_file.write('%s ' % name)
        output_file.write('\n')
        for chunk in result_iterator:
            for line in chunk:
                # NOTE(review): values are taken from line[1:13] although only
                # 12 columns were requested -- presumably query_columns
                # prepends an extra leading column; confirm against its
                # implementation.
                # 'nan'/'None' are rewritten as NULL so sqlite stores nulls.
                output_file.write((
                    '%d;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%s;%s;%.17g\n'
                    % (line[1], line[2], line[3], line[4], line[5], line[6],
                       line[7], line[8], line[9], str(line[10]),
                       str(line[11]), line[12])).replace('nan', 'NULL').replace('None', 'NULL'))
    # start from a clean database file before ingesting
    if os.path.exists(star_db_name):
        os.unlink(star_db_name)
    # ingest the semicolon-delimited cache file into the sqlite database
    dbo = fileDBObject(star_cache_name, driver='sqlite',
                       runtable='star_cache_table', database=star_db_name,
                       dtype=star_dtype, delimiter=';', idColKey='simobjid')
    # the text file is only an intermediate product; remove it once ingested
    if os.path.exists(star_cache_name):
        os.unlink(star_cache_name)
def setUpClass(cls):
    """Ingest the SSM astrometry test catalog into a fileDBObject."""
    cls.dbFile = os.path.join(getPackageDir('sims_catUtils'),
                              'tests', 'testData', 'SSMastrometryCatalog.txt')
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    cls.dtype = np.dtype([('id', int), ('raJ2000', float),
                          ('decJ2000', float), ('velRa', float),
                          ('velDec', float)])
    cls.astDB = fileDBObject(cls.dbFile, runtable='test', dtype=cls.dtype,
                             idColKey='id')
def setUpClass(cls):
    """Ingest the SSM photometry test catalog into a fileDBObject."""
    cls.dbFile = os.path.join(getPackageDir('sims_catUtils'),
                              'tests', 'testData', 'SSMphotometryCatalog.txt')
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    cls.dtype = np.dtype([('id', int), ('sedFilename', str, 100),
                          ('magNorm', float), ('velRa', float),
                          ('velDec', float)])
    cls.photDB = fileDBObject(cls.dbFile, runtable='test', dtype=cls.dtype,
                              idColKey='id')
def setUpClass(cls):
    """Generate random (ra, dec, mag) rows, keep a control record array,
    write them to a text file, and ingest the file into a sqlite db."""
    cls.baseDir = tempfile.mkdtemp(dir=ROOT, prefix='scratchSpace-')
    cls.textFileName = os.path.join(cls.baseDir,
                                    'compound_obs_metadata_text_data.txt')
    numpy.random.seed(42)
    nSamples = 100
    raList = numpy.random.random_sample(nSamples)*360.0
    decList = numpy.random.random_sample(nSamples)*180.0 - 90.0
    magList = numpy.random.random_sample(nSamples)*15.0 + 7.0
    # numpy.float/numpy.int were removed in NumPy 1.24; use the builtins
    dtype = numpy.dtype([('ra', float), ('dec', float), ('mag', float)])
    cls.controlArray = numpy.rec.fromrecords([(r, d, m) for r, d, m in
                                              zip(raList, decList, magList)],
                                             dtype=dtype)
    dbDtype = numpy.dtype([('id', int), ('ra', float),
                           ('dec', float), ('mag', float)])
    if os.path.exists(cls.textFileName):
        os.unlink(cls.textFileName)
    with open(cls.textFileName, 'w') as output:
        output.write('# id ra dec mag\n')
        for ix, (r, d, m) in enumerate(zip(raList, decList, magList)):
            output.write('%d %.20f %.20f %.20f\n' % (ix, r, d, m))
    cls.dbName = os.path.join(cls.baseDir, 'compound_obs_metadata_db.db')
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    fileDBObject(cls.textFileName, runtable='test', database=cls.dbName,
                 dtype=dbDtype, idColKey='id')
def setUpClass(cls):
    """Write a small 4-column text catalog into a scratch directory and
    ingest it into a sqlite database with fileDBObject."""
    cls.scratch_dir = tempfile.mkdtemp(dir=ROOT, prefix="scratchSpace-")
    cls.db_src_name = os.path.join(cls.scratch_dir,
                                   'compound_cat_filter_db.txt')
    # remove stale artifacts from a previous run
    if os.path.exists(cls.db_src_name):
        os.unlink(cls.db_src_name)
    cls.db_name = os.path.join(cls.scratch_dir, 'compound_cat_filter_db.db')
    if os.path.exists(cls.db_name):
        os.unlink(cls.db_name)
    with open(cls.db_src_name, 'w') as output_file:
        output_file.write('#a header\n')
        # ten rows whose columns are id, id+1, id+2, id+3
        for ii in range(10):
            output_file.write('%d %d %d %d\n' % (ii, ii+1, ii+2, ii+3))
    dtype = np.dtype([('id', int), ('ip1', int), ('ip2', int), ('ip3', int)])
    # ingest the text file into a sqlite table named 'test'
    fileDBObject(cls.db_src_name, runtable='test', dtype=dtype,
                 idColKey='id', database=cls.db_name)
def create_sn_cache(db=None):
    """
    Read the supernova data from a database and cache it in the file
    $TWINKLES_DIR/data/sn_cache.db in the table 'sn_cache_table'

    Param
    -----
    db is a CatalogDBObject from which to read the data.  If None,
    use the CatSim class SNDBObj, which will connect to the
    TwinkSN_run3 table on fatboy.
    """
    sn_dtype = np.dtype([('id', int), ('galtileid', int), ('snra', float),
                         ('sndec', float), ('t0', float), ('x0', float),
                         ('x1', float), ('c', float), ('redshift', float)])
    col_names = list(sn_dtype.names)
    sn_cache_name = os.path.join(getPackageDir('twinkles'), 'data',
                                 'twinkles_sn_cache.txt')
    # remove any stale intermediate file from a previous run
    if os.path.exists(sn_cache_name):
        os.unlink(sn_cache_name)
    sn_db_name = os.path.join(getPackageDir('twinkles'), 'data',
                              'sn_cache.db')
    if db is None:
        db = SNDBObj(table='TwinkSN_run3')
    result_iterator = db.query_columns(colnames=col_names, chunk_size=10000,
                                       obs_metadata=_obs)
    with open(sn_cache_name, 'w') as output_file:
        # space-separated header naming the cached columns
        output_file.write('# ')
        for name in col_names:
            output_file.write('%s ' % name)
        output_file.write('\n')
        for chunk in result_iterator:
            for line in chunk:
                # the 'l' length modifier in '%ld' is accepted and ignored by
                # Python's %-formatting; 'nan'/'None' become NULL so sqlite
                # stores proper nulls.
                output_file.write(
                    ('%d;%ld;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g\n'
                     % (line[0], line[1], line[2], line[3], line[4], line[5],
                        line[6], line[7], line[8])).replace(
                    'nan', 'NULL').replace('None', 'NULL'))
    # start from a clean database file before ingesting
    if os.path.exists(sn_db_name):
        os.unlink(sn_db_name)
    # ingest the semicolon-delimited cache file into the sqlite database
    dbo = fileDBObject(sn_cache_name, driver='sqlite',
                       runtable='sn_cache_table', database=sn_db_name,
                       dtype=sn_dtype, delimiter=';', idColKey='id')
    # the text file is only an intermediate product; remove it once ingested
    if os.path.exists(sn_cache_name):
        os.unlink(sn_cache_name)
def create_galaxy_cache(db_dir):
    """
    Create an sqlite .db file in data/ containing all of the galaxies
    in the Twinkles field of view.

    Parameters
    ----------
    db_dir is the directory in which we want to create the galaxy cache
    """
    obs = ObservationMetaData(pointingRA=53.0091385, pointingDec=-27.4389488,
                              boundType='circle', boundLength=0.31)
    db = GalaxyTileObjDegrees()
    col_names = list(_galaxy_cache_dtype.names)
    result_iterator = db.query_columns(colnames=col_names, chunk_size=100000,
                                       obs_metadata=obs)
    with open(_galaxy_cache_file_name, 'w') as output_file:
        # space-separated header naming the cached columns
        output_file.write('# galtileid ')
        for name in col_names:
            output_file.write('%s ' % name)
        output_file.write('\n')
        for chunk in result_iterator:
            for line in chunk:
                # 'nan'/'None' become NULL so sqlite stores proper nulls
                output_file.write(
                    ('%ld;%.17g;%.17g;%s;%.17g;%s;%.17g;%s;%.17g;%s;%.17g;%.17g;'
                     % (line[0], line[1], line[2], line[3], line[4], line[5],
                        line[6], line[7], line[8], line[9], line[10],
                        line[11])).replace('nan', 'NULL').replace('None', 'NULL')
                    + ('%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g;%.17g'
                       % (line[12], line[13], line[14], line[15], line[16],
                          line[17], line[18], line[19], line[20], line[21],
                          line[22], line[23])).replace('nan', 'NULL').replace('None', 'NULL')
                    + '\n')
    full_db_name = os.path.join(db_dir, _galaxy_cache_db_name)
    # BUG FIX: the pre-existence check used the bare _galaxy_cache_db_name
    # instead of the full path, so a cache already present in db_dir was
    # never detected.
    if os.path.exists(full_db_name):
        raise RuntimeError("Trying to create %s, but it already exists"
                           % full_db_name)
    dbo = fileDBObject(_galaxy_cache_file_name, driver='sqlite',
                       runtable=_galaxy_cache_table_name,
                       database=full_db_name, dtype=_galaxy_cache_dtype,
                       delimiter=';', idColKey='galtileid')
    # the text file is only an intermediate product; remove it once ingested
    if os.path.exists(_galaxy_cache_file_name):
        os.unlink(_galaxy_cache_file_name)
def setUpClass(cls):
    """Generate random (a, b, c, d) rows, keep a control record array,
    write them to a text file, and ingest the file into two sqlite
    databases (one of them twice, under two table names)."""
    numpy.random.seed(42)
    # numpy.float/numpy.int were removed in NumPy 1.24; use the builtins
    # NOTE(review): 'd' is capped at 20 chars here, but the words built
    # below grow up to 101 chars, so controlArray truncates them -- confirm
    # the tests rely on that truncation.
    dtype = numpy.dtype([('a', float), ('b', float),
                         ('c', float), ('d', str, 20)])
    nSamples = 100
    aList = numpy.random.random_sample(nSamples)*10.0
    bList = numpy.random.random_sample(nSamples)*(-1.0)
    cList = numpy.random.random_sample(nSamples)*10.0-5.0
    # strings of the form 'ab', 'abb', 'abbb', ...
    ww = 'a'
    dList = []
    for ix in range(nSamples):
        ww += 'b'
        dList.append(ww)
    cls.controlArray = numpy.rec.fromrecords([(aa, bb, cc, dd)
                                              for aa, bb, cc, dd in
                                              zip(aList, bList, cList, dList)],
                                             dtype=dtype)
    cls.baseDir = tempfile.mkdtemp(dir=ROOT, prefix='scratchSpace-')
    cls.textFileName = os.path.join(cls.baseDir, 'compound_test_data.txt')
    if os.path.exists(cls.textFileName):
        os.unlink(cls.textFileName)
    with open(cls.textFileName, 'w') as output:
        output.write('# id a b c d\n')
        for ix, (aa, bb, cc, dd) in enumerate(zip(aList, bList, cList, dList)):
            output.write('%d %e %e %e %s\n' % (ix, aa, bb, cc, dd))
    cls.dbName = os.path.join(cls.baseDir, 'compoundCatalogTestDB.db')
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    cls.otherDbName = os.path.join(cls.baseDir, 'otherDb.db')
    if os.path.exists(cls.otherDbName):
        os.unlink(cls.otherDbName)
    dtype = numpy.dtype([('id', int), ('a', float), ('b', float),
                         ('c', float), ('d', str, 20)])
    fileDBObject(cls.textFileName, runtable='test', database=cls.dbName,
                 dtype=dtype, idColKey='id')
    fileDBObject(cls.textFileName, runtable='test', database=cls.otherDbName,
                 dtype=dtype, idColKey='id')
    fileDBObject(cls.textFileName, runtable='otherTest', database=cls.dbName,
                 dtype=dtype, idColKey='id')
def test_stars(self):
    """Write a CartoonStars catalog and verify its magnitudes and magnitude
    errors against values recomputed directly from the SEDs."""
    obs = ObservationMetaData(bandpassName=['c_u', 'c_g'], m5=[25.0, 26.0])
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    db_dtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                         ('sedFilename', str, 100), ('magNorm', float),
                         ('galacticAv', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    db = fileDBObject(inputFile, dtype=db_dtype, runtable='test',
                      idColKey='id')
    cat = CartoonStars(db, obs_metadata=obs)
    with lsst.utils.tests.getTempFilePath('.txt') as catName:
        cat.write_catalog(catName)
        dtype = np.dtype([(name, float) for name in cat.column_outputs])
        controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')

    db_columns = db.query_columns(['id', 'raJ2000', 'decJ2000', 'sedFilename',
                                   'magNorm', 'galacticAv'])
    sedDir = os.path.join(getPackageDir('sims_sed_library'),
                          'starSED', 'kurucz')
    for ix, line in enumerate(next(db_columns)):
        # rebuild each star's SED: normalize, then apply CCM dust
        spectrum = Sed()
        spectrum.readSED_flambda(os.path.join(sedDir, line[3]))
        fnorm = spectrum.calcFluxNorm(line[4], self.normband)
        spectrum.multiplyFluxNorm(fnorm)
        a_x, b_x = spectrum.setupCCM_ab()
        spectrum.addDust(a_x, b_x, A_v=line[5])
        umag = spectrum.calcMag(self.uband)
        self.assertAlmostEqual(umag, controlData['cartoon_u'][ix], 3)
        gmag = spectrum.calcMag(self.gband)
        self.assertAlmostEqual(gmag, controlData['cartoon_g'][ix], 3)
        umagError, gamma = calcMagError_m5(umag, self.uband, obs.m5['c_u'],
                                           PhotometricParameters())
        gmagError, gamma = calcMagError_m5(gmag, self.gband, obs.m5['c_g'],
                                           PhotometricParameters())
        self.assertAlmostEqual(umagError, controlData['sigma_cartoon_u'][ix], 3)
        self.assertAlmostEqual(gmagError, controlData['sigma_cartoon_g'][ix], 3)
def test_ingest(self):
    """
    Test that fileDBObject correctly ingests a text file
    containing multiple data types.
    """
    txt_file_name = os.path.join(self.scratch_dir, "filedbojb_ingest_test.txt")
    rng = np.random.RandomState(8821)
    alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    n_rows = 34
    n_letters = 72
    f_list = rng.random_sample(n_rows)
    i_list = rng.randint(0, 2**50, n_rows)
    word_dex_list = rng.randint(0, len(alphabet) - 1, (n_rows, n_letters))
    word_list = []
    # write one row per line: id, float, big int, random word
    with open(txt_file_name, 'w') as output_file:
        output_file.write("# a header\n")
        for row_dex, (ff, ii, letter_dexes) in \
                enumerate(zip(f_list, i_list, word_dex_list)):
            word = ''.join(alphabet[dex] for dex in letter_dexes)
            word_list.append(word)
            self.assertEqual(len(word), n_letters)
            output_file.write('%d %.13f %ld %s\n' % (row_dex, ff, ii, word))
    dtype = np.dtype([('id', int), ('float', float),
                      ('int', int), ('word', str, n_letters)])
    db = fileDBObject(txt_file_name, runtable='test', dtype=dtype,
                      idColKey='id')
    results = db.execute_arbitrary('SELECT * from test')
    self.assertEqual(len(results), n_rows)
    # each returned row's id column indexes back into the control lists
    for row in results:
        row_id = row[0]
        self.assertAlmostEqual(f_list[row_id], row[1], 13)
        self.assertEqual(i_list[row_id], row[2])
        self.assertEqual(word_list[row_id], row[3])
    if os.path.exists(txt_file_name):
        os.unlink(txt_file_name)
def test_ingest(self):
    """
    Test that fileDBObject correctly ingests a text file
    containing multiple data types.
    """
    txt_file_name = os.path.join(self.scratch_dir, "filedbojb_ingest_test.txt")
    rng = np.random.RandomState(8821)
    alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    n_rows = 34
    n_letters = 72
    f_list = rng.random_sample(n_rows)
    # large integers exercise 64-bit storage in sqlite
    i_list = rng.randint(0, 2**50, n_rows)
    # each row gets a random 72-letter word drawn from 'alphabet'
    word_dex_list = rng.randint(0, len(alphabet)-1, (n_rows, n_letters))
    word_list = []
    with open(txt_file_name, 'w') as output_file:
        output_file.write("# a header\n")
        for ix, (ff, ii, ww) in enumerate(zip(f_list, i_list, word_dex_list)):
            word = ''
            for wwdex in ww:
                word += alphabet[wwdex]
            word_list.append(word)
            self.assertEqual(len(word), n_letters)
            output_file.write('%d %.13f %ld %s\n' % (ix, ff, ii, word))
    dtype = np.dtype([('id', int), ('float', float),
                      ('int', int), ('word', str, n_letters)])
    db = fileDBObject(txt_file_name, runtable='test', dtype=dtype,
                      idColKey='id')
    results = db.execute_arbitrary('SELECT * from test')
    self.assertEqual(len(results), n_rows)
    # row[0] is the id column, so it indexes back into the control lists
    for row in results:
        i_row = row[0]
        self.assertAlmostEqual(f_list[i_row], row[1], 13)
        self.assertEqual(i_list[i_row], row[2])
        self.assertEqual(word_list[i_row], row[3])
    if os.path.exists(txt_file_name):
        os.unlink(txt_file_name)
def test_stars(self):
    """Write a CartoonStars catalog and verify its magnitudes and magnitude
    errors against values recomputed directly from the SEDs."""
    obs = ObservationMetaData(bandpassName=['c_u', 'c_g'], m5=[25.0, 26.0])
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    db_dtype = np.dtype([('id', int), ('raJ2000', float), ('decJ2000', float),
                         ('sedFilename', str, 100), ('magNorm', float),
                         ('galacticAv', float)])
    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    db = fileDBObject(inputFile, dtype=db_dtype, runtable='test',
                      idColKey='id')
    cat = CartoonStars(db, obs_metadata=obs)
    with lsst.utils.tests.getTempFilePath('.txt') as catName:
        cat.write_catalog(catName)
        dtype = np.dtype([(name, float) for name in cat.column_outputs])
        controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')

    db_columns = db.query_columns(['id', 'raJ2000', 'decJ2000', 'sedFilename',
                                   'magNorm', 'galacticAv'])
    sedDir = os.path.join(getPackageDir('sims_sed_library'),
                          'starSED', 'kurucz')
    for ix, line in enumerate(next(db_columns)):
        # rebuild each star's SED: normalize, then apply CCM dust
        spectrum = Sed()
        spectrum.readSED_flambda(os.path.join(sedDir, line[3]))
        fnorm = spectrum.calcFluxNorm(line[4], self.normband)
        spectrum.multiplyFluxNorm(fnorm)
        a_x, b_x = spectrum.setupCCM_ab()
        spectrum.addDust(a_x, b_x, A_v=line[5])
        umag = spectrum.calcMag(self.uband)
        self.assertAlmostEqual(umag, controlData['cartoon_u'][ix], 3)
        gmag = spectrum.calcMag(self.gband)
        self.assertAlmostEqual(gmag, controlData['cartoon_g'][ix], 3)
        umagError, gamma = calcMagError_m5(umag, self.uband, obs.m5['c_u'],
                                           PhotometricParameters())
        gmagError, gamma = calcMagError_m5(gmag, self.gband, obs.m5['c_g'],
                                           PhotometricParameters())
        self.assertAlmostEqual(umagError, controlData['sigma_cartoon_u'][ix], 3)
        self.assertAlmostEqual(gmagError, controlData['sigma_cartoon_g'][ix], 3)
def write_star_file_db(file_name):
    """Write 10000 random stars (RA/Dec plus ugrizy magnitudes) to
    file_name, ingest them into a fileDBObject, and return the database
    handle together with the control data read back from the file."""
    np.random.seed(88)
    nstars = 10000
    ra = np.random.random_sample(nstars) * 360.0
    dec = (np.random.random_sample(nstars) - 0.5) * 180.0
    umag = np.random.random_sample(nstars) * 10.0 + 15.0
    gmag = np.random.random_sample(nstars) * 10.0 + 15.0
    rmag = np.random.random_sample(nstars) * 10.0 + 15.0
    imag = np.random.random_sample(nstars) * 10.0 + 15.0
    zmag = np.random.random_sample(nstars) * 10.0 + 15.0
    ymag = np.random.random_sample(nstars) * 10.0 + 15.0
    with open(file_name, 'w') as output_file:
        for ix, (rr, dd, um, gm, rm, im, zm, ym) in \
                enumerate(zip(ra, dec, umag, gmag, rmag, imag, zmag, ymag)):
            output_file.write(
                '%d %.12f %.12f %.12f %.12f %.12f %.12f %.12f %.12f\n'
                % (ix, rr, dd, um, gm, rm, im, zm, ym))
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    starDtype = np.dtype([('id', int), ('raJ2000', float),
                          ('decJ2000', float), ('umag', float),
                          ('gmag', float), ('rmag', float), ('imag', float),
                          ('zmag', float), ('ymag', float)])
    starDB = fileDBObject(file_name, runtable='stars', dtype=starDtype,
                          idColKey='id')
    starDB.raColName = 'raJ2000'
    starDB.decColName = 'decJ2000'
    controlData = np.genfromtxt(file_name, dtype=starDtype)
    return starDB, controlData
def write_star_file_db(file_name):
    """Write 10000 random stars (RA/Dec plus ugrizy magnitudes) to
    file_name, ingest them into a fileDBObject, and return the database
    handle together with the control data read back from the file."""
    np.random.seed(88)
    nstars = 10000
    ra = np.random.random_sample(nstars)*360.0
    dec = (np.random.random_sample(nstars)-0.5)*180.0
    umag = np.random.random_sample(nstars)*10.0 + 15.0
    gmag = np.random.random_sample(nstars)*10.0 + 15.0
    rmag = np.random.random_sample(nstars)*10.0 + 15.0
    imag = np.random.random_sample(nstars)*10.0 + 15.0
    zmag = np.random.random_sample(nstars)*10.0 + 15.0
    ymag = np.random.random_sample(nstars)*10.0 + 15.0
    with open(file_name, 'w') as output_file:
        for ix, (rr, dd, um, gm, rm, im, zm, ym) in \
                enumerate(zip(ra, dec, umag, gmag, rmag, imag, zmag, ymag)):
            output_file.write(
                '%d %.12f %.12f %.12f %.12f %.12f %.12f %.12f %.12f\n'
                % (ix, rr, dd, um, gm, rm, im, zm, ym))
    # np.int/np.float were removed in NumPy 1.24; use the builtins instead
    starDtype = np.dtype([('id', int), ('raJ2000', float),
                          ('decJ2000', float), ('umag', float),
                          ('gmag', float), ('rmag', float), ('imag', float),
                          ('zmag', float), ('ymag', float)])
    starDB = fileDBObject(file_name, runtable='stars', dtype=starDtype,
                          idColKey='id')
    starDB.raColName = 'raJ2000'
    starDB.decColName = 'decJ2000'
    controlData = np.genfromtxt(file_name, dtype=starDtype)
    return starDB, controlData
def test_ParametrizedLightCurve_in_catalog(self):
    """
    Test the performance of applyParametrizedLightCurve() in the context
    of an InstanceCatalog
    """
    # Create dummy light curve parameters
    lc_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog',
                                        suffix='.gz')

    rng = np.random.RandomState(1621145)
    # Fourier-series coefficients for the first fake Kepler light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1)*5.0
    b1_list = (rng.random_sample(n_c_1)-0.5)*2.0
    c1_list = (rng.random_sample(n_c_1)-0.5)*0.1
    omega1_list = rng.random_sample(n_c_1)*20.0
    tau1_list = rng.random_sample(n_c_1)*100.0
    median1 = 100.0

    # coefficients for the second fake Kepler light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2)*5.0
    b2_list = (rng.random_sample(n_c_2)-0.5)*2.0
    c2_list = (rng.random_sample(n_c_2)-0.5)*0.1
    omega2_list = rng.random_sample(n_c_2)*20.0
    tau2_list = rng.random_sample(n_c_2)*100.0
    median2 = 200.0

    # the parameter file is gzipped text opened in binary mode, hence
    # the bytes literals below
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr999990000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e '
                           % (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                              omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')
        out_file.write(b'kplr999990001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e '
                           % (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                              omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    # Create dummy database of astrophysical sources
    db_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog_db',
                                        suffix='.txt')

    # None entries exercise the "no variability" code path
    lc_list = [999990001, None, 999990001, 999990000]
    t0_list = [1729.1, None, 2345.1, 10.9]
    with open(db_temp_file_name, 'w') as out_file:
        out_file.write('# a header\n')
        for i_obj in range(len(lc_list)):
            if lc_list[i_obj] is not None:
                paramStr = '{"m":"kplr", "p":{"lc":%d, "t0":%.3f}}' % (lc_list[i_obj], t0_list[i_obj])
            else:
                paramStr = None
            out_file.write('%d;10.0;20.0;0.01;0.01;%s\n' % (i_obj, paramStr))

    dtype = np.dtype([('simobjid', int), ('ra', float), ('dec', float),
                      ('ebv', float), ('parallax', float),
                      ('varParamStr', str, 100)])
    db = fileDBObject(db_temp_file_name, runtable='test', dtype=dtype,
                      delimiter=';', idColKey='simobjid')

    class ParametrizedVarParamStrCat(InstanceCatalog, VariabilityStars):
        # catalog columns: object id plus per-band magnitude offsets
        column_outputs = ['simobjid', 'delta_lsst_u', 'delta_lsst_g',
                          'delta_lsst_r', 'delta_lsst_i', 'delta_lsst_z',
                          'delta_lsst_y']
        default_formats = {'f': '%.15g'}

    obs = ObservationMetaData(mjd=59580.0)
    cat = ParametrizedVarParamStrCat(db, obs_metadata=obs)
    cat.load_parametrized_light_curves(lc_temp_file_name)
    cat_out_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_cat_out',
                                   suffix='.txt')
    cat.write_catalog(cat_out_name)

    kp = ParametrizedLightCurveMixin()
    cat_dtype = np.dtype([('simobjid', int), ('du', float), ('dg', float),
                          ('dr', float), ('di', float), ('dz', float),
                          ('dy', float)])
    cat_data = np.genfromtxt(cat_out_name, dtype=cat_dtype, delimiter=', ')
    for i_obj in range(len(cat_data)):
        obj_id = cat_data['simobjid'][i_obj]
        if lc_list[obj_id] is None:
            # objects without a varParamStr must show zero variability
            self.assertEqual(cat_data['du'][i_obj], 0.0)
            self.assertEqual(cat_data['dg'][i_obj], 0.0)
            self.assertEqual(cat_data['dr'][i_obj], 0.0)
            self.assertEqual(cat_data['di'][i_obj], 0.0)
            self.assertEqual(cat_data['dz'][i_obj], 0.0)
            self.assertEqual(cat_data['dy'][i_obj], 0.0)
        else:
            # recompute the expected magnitude offset directly from the
            # light-curve model and compare against the catalog output
            q_flux, d_flux = kp._calc_dflux(lc_list[obj_id],
                                            obs.mjd.TAI-t0_list[obj_id])
            d_mag_true = -2.5*np.log10(1.0+d_flux/q_flux)
            self.assertGreater(np.abs(d_mag_true), 0.0001)
            self.assertAlmostEqual(cat_data['du'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dg'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dr'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['di'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dz'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dy'][i_obj], d_mag_true, 15)

    # clean up every temporary product
    if os.path.exists(cat_out_name):
        os.unlink(cat_out_name)
    if os.path.exists(db_temp_file_name):
        os.unlink(db_temp_file_name)
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def test_ParametrizedLightCurve_in_catalog(self):
    """
    Test the performance of applyParametrizedLightCurve() in the context
    of an InstanceCatalog
    """
    # Create dummy light curve parameters
    lc_temp_file_name = tempfile.mktemp(
        prefix='test_ParametrizedLightCurve_in_catalog', suffix='.gz')

    rng = np.random.RandomState(1621145)
    # Fourier-series coefficients for the first fake Kepler light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1) * 5.0
    b1_list = (rng.random_sample(n_c_1) - 0.5) * 2.0
    c1_list = (rng.random_sample(n_c_1) - 0.5) * 0.1
    omega1_list = rng.random_sample(n_c_1) * 20.0
    tau1_list = rng.random_sample(n_c_1) * 100.0
    median1 = 100.0

    # coefficients for the second fake Kepler light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2) * 5.0
    b2_list = (rng.random_sample(n_c_2) - 0.5) * 2.0
    c2_list = (rng.random_sample(n_c_2) - 0.5) * 0.1
    omega2_list = rng.random_sample(n_c_2) * 20.0
    tau2_list = rng.random_sample(n_c_2) * 100.0
    median2 = 200.0

    # the parameter file is gzipped text opened in binary mode, hence
    # the bytes literals below
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr999990000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0 / (i_c + 1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e '
                           % (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                              omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')
        out_file.write(b'kplr999990001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0 / (i_c + 1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e '
                           % (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                              omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    # Create dummy database of astrophysical sources
    db_temp_file_name = tempfile.mktemp(
        prefix='test_ParametrizedLightCurve_in_catalog_db', suffix='.txt')

    # None entries exercise the "no variability" code path
    lc_list = [999990001, None, 999990001, 999990000]
    t0_list = [1729.1, None, 2345.1, 10.9]
    with open(db_temp_file_name, 'w') as out_file:
        out_file.write('# a header\n')
        for i_obj in range(len(lc_list)):
            if lc_list[i_obj] is not None:
                paramStr = '{"m":"kplr", "p":{"lc":%d, "t0":%.3f}}' % (
                    lc_list[i_obj], t0_list[i_obj])
            else:
                paramStr = None
            out_file.write('%d;10.0;20.0;0.01;0.01;%s\n' % (i_obj, paramStr))

    dtype = np.dtype([('simobjid', int), ('ra', float), ('dec', float),
                      ('ebv', float), ('parallax', float),
                      ('varParamStr', str, 100)])
    db = fileDBObject(db_temp_file_name, runtable='test', dtype=dtype,
                      delimiter=';', idColKey='simobjid')

    class ParametrizedVarParamStrCat(InstanceCatalog, VariabilityStars):
        # catalog columns: object id plus per-band magnitude offsets
        column_outputs = [
            'simobjid', 'delta_lsst_u', 'delta_lsst_g', 'delta_lsst_r',
            'delta_lsst_i', 'delta_lsst_z', 'delta_lsst_y'
        ]
        default_formats = {'f': '%.15g'}

    obs = ObservationMetaData(mjd=59580.0)
    cat = ParametrizedVarParamStrCat(db, obs_metadata=obs)
    cat.load_parametrized_light_curves(lc_temp_file_name)
    cat_out_name = tempfile.mktemp(
        prefix='test_ParametrizedLightCurve_in_cat_out', suffix='.txt')
    cat.write_catalog(cat_out_name)

    kp = ParametrizedLightCurveMixin()
    cat_dtype = np.dtype([('simobjid', int), ('du', float), ('dg', float),
                          ('dr', float), ('di', float), ('dz', float),
                          ('dy', float)])
    cat_data = np.genfromtxt(cat_out_name, dtype=cat_dtype, delimiter=', ')
    for i_obj in range(len(cat_data)):
        obj_id = cat_data['simobjid'][i_obj]
        if lc_list[obj_id] is None:
            # objects without a varParamStr must show zero variability
            self.assertEqual(cat_data['du'][i_obj], 0.0)
            self.assertEqual(cat_data['dg'][i_obj], 0.0)
            self.assertEqual(cat_data['dr'][i_obj], 0.0)
            self.assertEqual(cat_data['di'][i_obj], 0.0)
            self.assertEqual(cat_data['dz'][i_obj], 0.0)
            self.assertEqual(cat_data['dy'][i_obj], 0.0)
        else:
            # recompute the expected magnitude offset directly from the
            # light-curve model and compare against the catalog output
            q_flux, d_flux = kp._calc_dflux(lc_list[obj_id],
                                            obs.mjd.TAI - t0_list[obj_id])
            d_mag_true = -2.5 * np.log10(1.0 + d_flux / q_flux)
            self.assertGreater(np.abs(d_mag_true), 0.0001)
            self.assertAlmostEqual(cat_data['du'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dg'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dr'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['di'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dz'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dy'][i_obj], d_mag_true, 15)

    # clean up every temporary product
    if os.path.exists(cat_out_name):
        os.unlink(cat_out_name)
    if os.path.exists(db_temp_file_name):
        os.unlink(db_temp_file_name)
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def test_different_cameras(self):
    """
    Verify that CameraCoordsLSST (native LSST camera) produces exactly the
    same (id, chipName) results as the generic CameraCoords mixin configured
    with LsstSimMapper().camera.
    """
    rng = np.random.RandomState(6512)
    pointing_ra = 15.0
    pointing_dec = 13.0

    # Scatter 100 stars within 2 degrees of the pointing, with small
    # parallaxes, proper motions, and radial velocities.
    n_obj = 100
    ra_list = pointing_ra + 2.0*rng.random_sample(n_obj)
    dec_list = pointing_dec + 2.0*rng.random_sample(n_obj)
    px_list = radiansFromArcsec(0.005)*rng.random_sample(n_obj)
    px_list += radiansFromArcsec(0.001)  # keep parallax strictly positive
    mura_list = radiansFromArcsec(0.005)*rng.random_sample(n_obj)
    mudec_list = radiansFromArcsec(0.005)*rng.random_sample(n_obj)
    vrad_list = 100.0*rng.random_sample(n_obj)

    with lsst.utils.tests.getTempFilePath('.txt') as db_text_file:
        # Write the sources to a text file so fileDBObject can ingest them
        # into an in-memory database.
        with open(db_text_file, 'w') as out_file:
            for ix, (rdeg, ddeg, rrad, drad, px, mura, mudec, vrad) in \
                enumerate(zip(ra_list, dec_list,
                              np.radians(ra_list), np.radians(dec_list),
                              px_list, mura_list, mudec_list, vrad_list)):

                out_file.write('%d %e %e %e %e %e %e %e %e\n'
                               % (ix, rdeg, ddeg, rrad, drad, px, mura, mudec, vrad))

        dtype = np.dtype([('id', int), ('raDeg', float), ('decDeg', float),
                          ('raJ2000', float), ('decJ2000', float),
                          ('parallax', float),
                          ('properMotionRa', float), ('properMotionDec', float),
                          ('radialVelocity', float)])

        db = fileDBObject(db_text_file, dtype=dtype, idColKey='id')
        db.raColName = 'raDeg'
        db.decColName = 'decDeg'

        # Control catalog: generic CameraCoords with an explicitly supplied
        # LSST camera object.
        class CameraCoordsCatalog(AstrometryStars, CameraCoords,
                                  InstanceCatalog):
            camera = LsstSimMapper().camera
            column_outputs = ['id', 'chipName']

        # Test catalog: CameraCoordsLSST, which should pick up the same
        # camera implicitly.
        class CameraCoordsLSSTCatalog(AstrometryStars, CameraCoordsLSST,
                                      InstanceCatalog):
            column_outputs = ['id', 'chipName']

        obs = ObservationMetaData(pointingRA=pointing_ra,
                                  pointingDec=pointing_dec,
                                  boundLength=1.75, boundType='circle',
                                  rotSkyPos=23.0, mjd=59580.0)

        control_cat = CameraCoordsCatalog(db, obs_metadata=obs)
        test_cat = CameraCoordsLSSTCatalog(db, obs_metadata=obs)

        control_line_list = []
        none_chips = 0
        for line in control_cat.iter_catalog():
            if line[1] is None:
                none_chips += 1
            control_line_list.append(line)
        self.assertGreater(len(control_line_list), 0)
        # Sanity check: most sources should actually land on a chip.
        self.assertLess(none_chips, len(control_line_list)/2)

        # Every line of the test catalog must also appear in the control
        # catalog, and the two must have the same length.
        line_ct = 0
        for line in test_cat.iter_catalog():
            line_ct += 1
            self.assertIn(line, control_line_list)
        self.assertEqual(line_ct, len(control_line_list))
def setUpClass(cls):
    """
    Create a fake catalog of RR Lyrae stars.  Store it in cls.stellar_db
    """
    # NOTE(review): 'Ligght' is a typo, but it is part of a runtime
    # directory-prefix string, so it is left untouched here.
    cls.scratchDir = tempfile.mkdtemp(dir=ROOT, prefix='StellarLigghtCurveTest-')
    rng = np.random.RandomState(88)
    n_stars = 10000

    # Draw SEDs from the Kurucz stellar library and light curves from the
    # RRab templates shipped with sims_sed_library.
    sed_dir = os.path.join(getPackageDir("sims_sed_library"))
    sed_dir = os.path.join(sed_dir, "starSED", "kurucz")
    list_of_seds = os.listdir(sed_dir)
    lc_dir = os.path.join(getPackageDir("sims_sed_library"), "rrly_lc")
    lc_dir = os.path.join(lc_dir, "RRab")
    list_of_lc = ['rrly_lc/RRab/%s' % ww
                  for ww in os.listdir(lc_dir) if "per.txt" in ww]

    cls.dtype = np.dtype([('id', np.int),
                          ('raDeg', np.float), ('decDeg', np.float),
                          ('raJ2000', np.float), ('decJ2000', np.float),
                          ('magNorm', np.float), ('galacticAv', np.float),
                          ('sedFilename', str, 300), ('varParamStr', str, 300),
                          ('parallax', np.float), ('ebv', np.float)])

    # write the catalog as a text file to be ingested with fileDBObject
    cls.txt_name = os.path.join(cls.scratchDir, "stellar_lc_catalog.txt")
    with open(cls.txt_name, "w") as output_file:
        sed_dex = rng.randint(0, len(list_of_seds), size=n_stars)
        lc_dex = rng.randint(0, len(list_of_lc), size=n_stars)
        mjd0 = rng.random_sample(n_stars)*10000.0+40000.0
        raList = rng.random_sample(n_stars)*360.0
        decList = -90.0 + rng.random_sample(n_stars)*120.0
        magNormList = rng.random_sample(n_stars)*3.0+14.0
        AvList = rng.random_sample(n_stars)*0.2+0.1
        pxList = rng.random_sample(n_stars)*0.1
        for ix in range(n_stars):
            # varParamStr drives the applyRRly variability model
            varparams = {'varMethodName': 'applyRRly',
                         'pars': {'tStartMjd': mjd0[ix],
                                  'filename': list_of_lc[lc_dex[ix]]}}
            varparamstr = json.dumps(varparams)
            output_file.write("%d;%lf;%lf;%lf;%lf;%lf;%lf;%s;%s;%lf;%lf\n"
                              % (ix, raList[ix], decList[ix],
                                 np.radians(raList[ix]),
                                 np.radians(decList[ix]),
                                 magNormList[ix], AvList[ix],
                                 list_of_seds[sed_dex[ix]],
                                 varparamstr, pxList[ix],
                                 AvList[ix]/3.1))  # ebv = Av / Rv with Rv = 3.1

    cls.stellar_db = fileDBObject(cls.txt_name, delimiter=';',
                                  runtable='test', dtype=cls.dtype,
                                  idColKey='id')
    cls.stellar_db.raColName = 'raDeg'
    cls.stellar_db.decColName = 'decDeg'
    cls.stellar_db.objectTypeId = 32
    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")
def setUpClass(cls):
    """
    Create a fake catalog of RR Lyrae stars and MLT dwarves with
    flaring light curves.  Store it in cls.stellar_db

    Half of the stars are RR Lyrae (applyRRly variability); the other
    half are MLT dwarves pointing at one of two synthetic flaring light
    curves saved to cls.mlt_lc_file_name as an .npz file.

    Bug fix: the declination span used to be written as
    ``decRange[1] - decRange[1]`` (identically zero), which pinned every
    star to dec = decRange[0] instead of spreading them over
    (decRange[0], decRange[1]).  Both occurrences now use
    ``decRange[1] - decRange[0]``.
    """
    cls.scratchDir = tempfile.mkdtemp(
        dir=ROOT, prefix='FastStellar_stellar_lc_gen_case-')
    cls.raRange = (78.0, 85.0)
    cls.decRange = (-69.0, -65.0)
    rng = np.random.RandomState(88)
    cls.n_stars = 20

    # SEDs come from the Kurucz stellar library; RR Lyrae light curves
    # from the RRab templates in sims_sed_library.
    sed_dir = os.path.join(getPackageDir("sims_sed_library"))
    sed_dir = os.path.join(sed_dir, "starSED", "kurucz")
    list_of_seds = os.listdir(sed_dir)
    lc_dir = os.path.join(getPackageDir("sims_sed_library"), "rrly_lc")
    lc_dir = os.path.join(lc_dir, "RRab")
    list_of_rrly_lc = [
        'rrly_lc/RRab/%s' % ww
        for ww in os.listdir(lc_dir) if "per.txt" in ww
    ]

    # Build two synthetic MLT flaring light curves (time in days, flux
    # in g and r) and save them as an .npz file for the MLT mixin.
    cls.mlt_lc_file_name = os.path.join(cls.scratchDir, "fast_lc_mlt_file.npz")
    if os.path.exists(cls.mlt_lc_file_name):
        os.unlink(cls.mlt_lc_file_name)
    mlt_lc_files = {}
    mlt_lc_files['lc_1_time'] = np.arange(0.0, 3652.51, 0.1)
    mlt_lc_files['lc_1_g'] = 2.2e32 * np.power(
        np.cos(mlt_lc_files['lc_1_time'] / 100.0 - 5.0), 2)
    mlt_lc_files['lc_1_r'] = 1.3e32 * (
        1.0 + np.sin(mlt_lc_files['lc_1_time'] / 100.0 - 3.0))
    mlt_lc_files['lc_2_time'] = np.arange(0.0, 3652.51, 0.1)
    mlt_lc_files['lc_2_g'] = 5.1e33 * (
        1.0 + np.cos(mlt_lc_files['lc_2_time'] / 300.0 - 10.0))
    mlt_lc_files['lc_2_r'] = 4.3e32 * (
        1.0 + np.sin(mlt_lc_files['lc_2_time'] / 50.0 - 71.0))
    with open(cls.mlt_lc_file_name, 'wb') as file_handle:
        np.savez(file_handle, **mlt_lc_files)

    cls.dtype = np.dtype([('id', np.int),
                          ('raDeg', np.float), ('decDeg', np.float),
                          ('raJ2000', np.float), ('decJ2000', np.float),
                          ('magNorm', np.float), ('galacticAv', np.float),
                          ('sedFilename', str, 300), ('varParamStr', str, 300),
                          ('parallax', np.float), ('ebv', np.float)])

    # write the catalog as a text file to be ingested with fileDBObject
    cls.txt_name = os.path.join(cls.scratchDir, "fast_stellar_lc_catalog.txt")
    with open(cls.txt_name, "w") as output_file:
        output_file.write('# a silly header\n')

        # --- first half: RR Lyrae stars ---
        sed_dex = rng.randint(0, len(list_of_seds), size=cls.n_stars // 2)
        lc_dex = rng.randint(0, len(list_of_rrly_lc), size=cls.n_stars // 2)
        mjd0 = rng.random_sample(cls.n_stars // 2) * 10000.0 + 40000.0
        raList = rng.random_sample(cls.n_stars // 2) * (
            cls.raRange[1] - cls.raRange[0]) + cls.raRange[0]
        # FIX: span is decRange[1] - decRange[0]; it was decRange[1] -
        # decRange[1] == 0, which left all stars at dec = decRange[0].
        decList = cls.decRange[0] + rng.random_sample(
            cls.n_stars // 2) * (cls.decRange[1] - cls.decRange[0])
        magNormList = rng.random_sample(cls.n_stars // 2) * 3.0 + 14.0
        AvList = rng.random_sample(cls.n_stars // 2) * 0.2 + 0.1
        pxList = rng.random_sample(cls.n_stars // 2) * 0.1
        for ix in range(cls.n_stars // 2):
            varparams = {
                'varMethodName': 'applyRRly',
                'pars': {
                    'tStartMjd': mjd0[ix],
                    'filename': list_of_rrly_lc[lc_dex[ix]]
                }
            }
            varparamstr = json.dumps(varparams)
            output_file.write(
                "%d;%lf;%lf;%lf;%lf;%lf;%lf;%s;%s;%lf;%lf\n"
                % (ix, raList[ix], decList[ix],
                   np.radians(raList[ix]), np.radians(decList[ix]),
                   magNormList[ix], AvList[ix],
                   list_of_seds[sed_dex[ix]], varparamstr,
                   pxList[ix], AvList[ix] / 3.1))  # ebv = Av / 3.1

        # --- second half: MLT dwarves with flaring light curves ---
        sed_dex = rng.randint(0, len(list_of_seds), size=cls.n_stars // 2)
        lc_dex = rng.randint(1, 3, size=cls.n_stars // 2)  # 'lc_1' or 'lc_2'
        mjd0 = rng.random_sample(cls.n_stars // 2) * 10000.0 + 40000.0
        raList = rng.random_sample(cls.n_stars // 2) * (
            cls.raRange[1] - cls.raRange[0]) + cls.raRange[0]
        # FIX: same zero-span declination bug as above.
        decList = cls.decRange[0] + rng.random_sample(
            cls.n_stars // 2) * (cls.decRange[1] - cls.decRange[0])
        magNormList = rng.random_sample(cls.n_stars // 2) * 3.0 + 14.0
        AvList = rng.random_sample(cls.n_stars // 2) * 0.2 + 0.1
        pxList = rng.random_sample(cls.n_stars // 2) * 0.1
        for ix in range(cls.n_stars // 2):
            varparams = {
                'm': 'MLT',
                'p': {
                    'lc': 'lc_%d' % lc_dex[ix],
                    't0': rng.random_sample() * 1000.0
                }
            }
            varparamstr = json.dumps(varparams)
            # ids continue after the RR Lyrae block; use integer division
            # so the %d field is fed an int, not a float.
            output_file.write(
                "%d;%lf;%lf;%lf;%lf;%lf;%lf;%s;%s;%lf;%lf\n"
                % (ix + cls.n_stars // 2, raList[ix], decList[ix],
                   np.radians(raList[ix]), np.radians(decList[ix]),
                   magNormList[ix], AvList[ix],
                   list_of_seds[sed_dex[ix]], varparamstr,
                   pxList[ix], AvList[ix] / 3.1))

    cls.stellar_db = fileDBObject(cls.txt_name, delimiter=';',
                                  runtable='test',
                                  dtype=cls.dtype, idColKey='id')
    cls.stellar_db.raColName = 'raDeg'
    cls.stellar_db.decColName = 'decDeg'
    cls.stellar_db.objectTypeId = 32
    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")
def test_InstanceCatalog_against_catalog_chunks(self):
    """
    Test that we can reproduce the validated data using the
    InstanceCatalog framework when the catalog must be written in
    multiple chunks
    """
    obs = ObservationMetaData(pointingRA=53.00913847303155535,
                              pointingDec=-27.43894880881512321,
                              rotSkyPos=256.75075318193080420,
                              mjd=59580.13955500000156462,
                              bandpassName='r',
                              site=Site(name="LSST", pressure=0.0, humidity=0.0))

    data_dir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')

    # Validated reference data: (ra, dec) inputs plus expected
    # deprecessed coordinates and pixel positions.
    dtype = np.dtype([('id', int), ('ra', float), ('dec', float),
                      ('ra_deprecessed', float), ('dec_deprecessed', float),
                      ('x_dm', float), ('y_dm', float),
                      ('x_focal', float), ('y_focal', float),
                      ('x_cam', float), ('y_cam', float)])

    data = np.genfromtxt(os.path.join(data_dir, 'pixel_prediction_catalog.txt'),
                         dtype=dtype)

    # Write only (id, raJ2000, decJ2000) in radians; everything else is
    # supplied by the getters in the catalog class below.
    data_txt_file = tempfile.mktemp(dir=data_dir,
                                    prefix='ic_validation_cat',
                                    suffix='.txt')
    cat_dtype = np.dtype([('id', int),
                          ('raJ2000', float), ('decJ2000', float)])
    with open(data_txt_file, 'w') as out_file:
        out_file.write('# a header\n')
        for ii, rr, dd in zip(data['id'],
                              np.radians(data['ra']),
                              np.radians(data['dec'])):
            out_file.write('%d %.17f %.17f\n' % (ii, rr, dd))

    db = fileDBObject(data_txt_file, idColKey='id',
                      dtype=cat_dtype, delimiter=' ')

    # PhoSim catalog with all motion/extinction columns zeroed out so
    # only deprecession affects the output coordinates.
    class DeprecessionTestCatalog_chunks(PhoSimCatalogPoint):
        def get_uniqueId(self):
            return self.column_by_name('id')

        def get_properMotionRa(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_properMotionDec(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_radialVelocity(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_parallax(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_galacticAv(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_galacticRv(self):
            return 3.1 * np.ones(len(self.column_by_name('raJ2000')))

        def get_sedFilepath(self):
            return np.array(['sed_flat.txt.gz'] * len(self.column_by_name('raJ2000')))

        def get_phoSimMagNorm(self):
            return np.ones(len(self.column_by_name('raJ2000')))

    cat = DeprecessionTestCatalog_chunks(db, obs_metadata=obs)
    cat.phoSimHeaderMap = DefaultPhoSimHeaderMap

    id_list = []
    ra_dep_list = []
    dec_dep_list = []

    phosim_cat_name = tempfile.mktemp(dir=data_dir,
                                      prefix='phosim_dep',
                                      suffix='.txt')
    # chunk_size=10 forces the catalog to be written in multiple chunks,
    # which is the behavior under test.
    cat.write_catalog(phosim_cat_name, chunk_size=10)
    with open(phosim_cat_name, 'r') as input_file:
        for line in input_file:
            params = line.strip().split()
            # Skip short header lines; object lines are
            # 'object <id> <ra> <dec> ...'.
            # NOTE(review): the guard only requires >= 3 tokens but
            # params[3] is read below — presumably no line has exactly
            # 3 tokens; confirm against the PhoSim header format.
            if len(params) < 3:
                continue
            id_list.append(int(params[1]))
            ra_dep_list.append(float(params[2]))
            dec_dep_list.append(float(params[3]))

    id_list = np.array(id_list)
    np.testing.assert_array_equal(id_list, data['id'])
    ra_dep_list = np.array(ra_dep_list)
    dec_dep_list = np.array(dec_dep_list)

    # Written coordinates must agree with the validated deprecessed
    # values to better than 1.0e-5 arcseconds.
    dd = 3600.0 * angularSeparation(data['ra_deprecessed'],
                                    data['dec_deprecessed'],
                                    ra_dep_list, dec_dep_list)
    self.assertLess(dd.max(), 1.0e-5)

    if os.path.exists(data_txt_file):
        os.unlink(data_txt_file)
    if os.path.exists(phosim_cat_name):
        os.unlink(phosim_cat_name)
def setUpClass(cls):
    """
    Write two text tables of random data, keep in-memory control copies
    (cls.table1Control / cls.table2Control), and ingest both into a single
    sqlite database (cls.dbName) as tables 'table1' and 'table2'.
    """
    cls.scratch_dir = tempfile.mkdtemp(dir=ROOT, prefix="CompoundCatalogTest")
    cls.table1FileName = os.path.join(cls.scratch_dir, 'compound_table1.txt')
    cls.table2FileName = os.path.join(cls.scratch_dir, 'compound_table2.txt')

    if os.path.exists(cls.table1FileName):
        os.unlink(cls.table1FileName)
    if os.path.exists(cls.table2FileName):
        os.unlink(cls.table2FileName)

    # dtype1 describes the control records (no id column);
    # dbDtype1 adds the id column used when ingesting the text file.
    dtype1 = np.dtype([('ra', np.float), ('dec', np.float),
                       ('mag', np.float), ('dmag', np.float),
                       ('dra', np.float), ('ddec', np.float)])

    dbDtype1 = np.dtype([('id', np.int),
                         ('ra', np.float), ('dec', np.float),
                         ('mag', np.float), ('dmag', np.float),
                         ('dra', np.float), ('ddec', np.float)])

    nPts = 100
    # NOTE(review): this seeds numpy's *global* RNG, so draw order here
    # matters for reproducibility.
    np.random.seed(42)
    raList = np.random.random_sample(nPts)*360.0
    decList = np.random.random_sample(nPts)*180.0-90.0
    magList = np.random.random_sample(nPts)*10.0+15.0
    dmagList = np.random.random_sample(nPts)*10.0 - 5.0
    draList = np.random.random_sample(nPts)*5.0 - 2.5
    ddecList = np.random.random_sample(nPts)*(-2.0) - 4.0

    cls.table1Control = np.rec.fromrecords([
        (r, d, mm, dm, dr, dd)
        for r, d, mm, dm, dr, dd in zip(raList, decList,
                                        magList, dmagList,
                                        draList, ddecList)], dtype=dtype1)

    with open(cls.table1FileName, 'w') as output:
        output.write("# id ra dec mag dmag dra ddec\n")
        for ix, (r, d, mm, dm, dr, dd) in \
            enumerate(zip(raList, decList, magList, dmagList, draList, ddecList)):

            output.write('%d %.12f %.12f %.12f %.12f %.12f %.12f\n'
                         % (ix, r, d, mm, dm, dr, dd))

    # Second, smaller table: just (ra, dec, mag).
    dtype2 = np.dtype([('ra', np.float), ('dec', np.float),
                       ('mag', np.float)])

    dbDtype2 = np.dtype([('id', np.int),
                         ('ra', np.float), ('dec', np.float),
                         ('mag', np.float)])

    ra2List = np.random.random_sample(nPts)*360.0
    dec2List = np.random.random_sample(nPts)*180.0-90.0
    mag2List = np.random.random_sample(nPts)*10+18.0

    cls.table2Control = np.rec.fromrecords([
        (r, d, m)
        for r, d, m in zip(ra2List, dec2List, mag2List)], dtype=dtype2)

    with open(cls.table2FileName, 'w') as output:
        output.write('# id ra dec mag\n')
        for ix, (r, d, m) in enumerate(zip(ra2List, dec2List, mag2List)):
            output.write('%d %.12f %.12f %.12f\n' % (ix, r, d, m))

    cls.dbName = os.path.join(cls.scratch_dir, 'compound_db.db')
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)

    # Ingest both text tables into the same sqlite database.
    fileDBObject(cls.table1FileName, runtable='table1',
                 database=cls.dbName, dtype=dbDtype1,
                 idColKey='id')

    fileDBObject(cls.table2FileName, runtable='table2',
                 database=cls.dbName, dtype=dbDtype2,
                 idColKey='id')
def test_mixed_stars(self):
    """
    Here we will test the (somewhat absurd) case of a catalog with two
    different bandpasses (lsst_ and cartoon_) in order to verify that
    gamma values are being cached correctly
    """
    lsst_u_band = Bandpass()
    lsst_u_band.readThroughput(os.path.join(getPackageDir('throughputs'),
                                            'baseline', 'total_u.dat'))
    lsst_g_band = Bandpass()
    lsst_g_band.readThroughput(os.path.join(getPackageDir('throughputs'),
                                            'baseline', 'total_g.dat'))

    obs = ObservationMetaData(bandpassName=['c_u', 'c_g', 'u', 'g'],
                              m5=[25.0, 26.0, 15.0, 16.0])
    # make the difference in m5 between the two bandpass systems extreme
    # so that, in the unit test, we can be sure that the correct values
    # are being used for the correct getters

    db_dtype = np.dtype([('id', np.int),
                         ('raJ2000', np.float), ('decJ2000', np.float),
                         ('sedFilename', str, 100), ('magNorm', np.float),
                         ('galacticAv', np.float)])

    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    db = fileDBObject(inputFile, dtype=db_dtype, runtable='test', idColKey='id')
    cat = CartoonStars(db, obs_metadata=obs,
                       column_outputs=['lsst_u', 'lsst_g',
                                       'sigma_lsst_u', 'sigma_lsst_g'])
    with lsst.utils.tests.getTempFilePath('.txt') as catName:
        cat.write_catalog(catName)
        dtype = np.dtype([(name, np.float) for name in cat._column_outputs])
        controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')

    db_columns = db.query_columns(['id', 'raJ2000', 'decJ2000',
                                   'sedFilename', 'magNorm', 'galacticAv'])

    sedDir = os.path.join(getPackageDir('sims_sed_library'),
                          'starSED', 'kurucz')

    # Recompute each star's magnitudes and magnitude errors by hand and
    # compare against what the catalog wrote.
    for ix, line in enumerate(next(db_columns)):
        spectrum = Sed()
        spectrum.readSED_flambda(os.path.join(sedDir, line[3]))
        fnorm = spectrum.calcFluxNorm(line[4], self.normband)
        spectrum.multiplyFluxNorm(fnorm)
        a_x, b_x = spectrum.setupCCM_ab()
        spectrum.addDust(a_x, b_x, A_v=line[5])
        umag = spectrum.calcMag(self.uband)
        self.assertAlmostEqual(umag, controlData['cartoon_u'][ix], 3)
        gmag = spectrum.calcMag(self.gband)
        self.assertAlmostEqual(gmag, controlData['cartoon_g'][ix], 3)
        lsst_umag = spectrum.calcMag(lsst_u_band)
        self.assertAlmostEqual(lsst_umag, controlData['lsst_u'][ix], 3)
        lsst_gmag = spectrum.calcMag(lsst_g_band)
        self.assertAlmostEqual(lsst_gmag, controlData['lsst_g'][ix], 3)

        # cartoon bandpass errors use the c_u/c_g m5 values...
        umagError, gamma = calcMagError_m5(umag, self.uband,
                                           obs.m5['c_u'],
                                           PhotometricParameters())
        gmagError, gamma = calcMagError_m5(gmag, self.gband,
                                           obs.m5['c_g'],
                                           PhotometricParameters())
        self.assertAlmostEqual(umagError, controlData['sigma_cartoon_u'][ix], 3)
        self.assertAlmostEqual(gmagError, controlData['sigma_cartoon_g'][ix], 3)

        # ...while lsst bandpass errors use the u/g m5 values.
        lsst_umagError, gamma = calcMagError_m5(lsst_umag, lsst_u_band,
                                                obs.m5['u'],
                                                PhotometricParameters())
        lsst_gmagError, gamma = calcMagError_m5(lsst_gmag, lsst_g_band,
                                                obs.m5['g'],
                                                PhotometricParameters())
        self.assertAlmostEqual(lsst_umagError,
                               controlData['sigma_lsst_u'][ix], 3)
        self.assertAlmostEqual(lsst_gmagError,
                               controlData['sigma_lsst_g'][ix], 3)
        # The extreme m5 difference guarantees the two error sets are
        # distinguishable; if gamma caching mixed them up, these would fail.
        self.assertGreater(np.abs(lsst_umagError-umagError), 0.01)
        self.assertGreater(np.abs(lsst_gmagError-gmagError), 0.01)
def test_InstanceCatalog_against_catalog_chunks(self):
    """
    Test that we can reproduce the validated data using the
    InstanceCatalog framework when the catalog must be written in
    multiple chunks
    """
    obs = ObservationMetaData(pointingRA=53.00913847303155535,
                              pointingDec=-27.43894880881512321,
                              rotSkyPos=256.75075318193080420,
                              mjd=59580.13955500000156462,
                              bandpassName='r',
                              site=Site(name="LSST", pressure=0.0, humidity=0.0))

    data_dir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')

    # Validated reference data: inputs plus expected deprecessed
    # coordinates and pixel positions.
    dtype = np.dtype([('id', int), ('ra', float), ('dec', float),
                      ('ra_deprecessed', float), ('dec_deprecessed', float),
                      ('x_dm', float), ('y_dm', float),
                      ('x_focal', float), ('y_focal', float),
                      ('x_cam', float), ('y_cam', float)])

    data = np.genfromtxt(os.path.join(data_dir, 'pixel_prediction_catalog.txt'),
                         dtype=dtype)

    # Only (id, raJ2000, decJ2000) go into the database; the catalog
    # class below zeroes out every other astrometric column.
    data_txt_file = tempfile.mktemp(dir=data_dir,
                                    prefix='ic_validation_cat',
                                    suffix='.txt')
    cat_dtype = np.dtype([('id', int),
                          ('raJ2000', float), ('decJ2000', float)])
    with open(data_txt_file, 'w') as out_file:
        out_file.write('# a header\n')
        for ii, rr, dd in zip(data['id'],
                              np.radians(data['ra']),
                              np.radians(data['dec'])):
            out_file.write('%d %.17f %.17f\n' % (ii, rr, dd))

    db = fileDBObject(data_txt_file, idColKey='id',
                      dtype=cat_dtype, delimiter=' ')

    class DeprecessionTestCatalog_chunks(PhoSimCatalogPoint):
        def get_uniqueId(self):
            return self.column_by_name('id')

        def get_properMotionRa(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_properMotionDec(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_radialVelocity(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_parallax(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_galacticAv(self):
            return np.zeros(len(self.column_by_name('raJ2000')))

        def get_galacticRv(self):
            return 3.1*np.ones(len(self.column_by_name('raJ2000')))

        def get_sedFilepath(self):
            return np.array(['sed_flat.txt.gz']*len(self.column_by_name('raJ2000')))

        def get_phoSimMagNorm(self):
            return np.ones(len(self.column_by_name('raJ2000')))

    cat = DeprecessionTestCatalog_chunks(db, obs_metadata=obs)
    cat.phoSimHeaderMap = DefaultPhoSimHeaderMap

    id_list = []
    ra_dep_list = []
    dec_dep_list = []

    phosim_cat_name = tempfile.mktemp(dir=data_dir,
                                      prefix='phosim_dep',
                                      suffix='.txt')
    # chunk_size=10 forces multi-chunk writing — the behavior under test.
    cat.write_catalog(phosim_cat_name, chunk_size=10)
    with open(phosim_cat_name, 'r') as input_file:
        for line in input_file:
            params = line.strip().split()
            # Skip short header lines; object lines carry
            # 'object <id> <ra> <dec> ...'.
            if len(params) < 3:
                continue
            id_list.append(int(params[1]))
            ra_dep_list.append(float(params[2]))
            dec_dep_list.append(float(params[3]))

    id_list = np.array(id_list)
    np.testing.assert_array_equal(id_list, data['id'])
    ra_dep_list = np.array(ra_dep_list)
    dec_dep_list = np.array(dec_dep_list)

    # Agreement with the validated deprecessed coordinates must be
    # better than 1.0e-5 arcseconds.
    dd = 3600.0*angularSeparation(data['ra_deprecessed'],
                                  data['dec_deprecessed'],
                                  ra_dep_list, dec_dep_list)
    self.assertLess(dd.max(), 1.0e-5)

    if os.path.exists(data_txt_file):
        os.unlink(data_txt_file)
    if os.path.exists(phosim_cat_name):
        os.unlink(phosim_cat_name)
def testGalSimPhoSimCat(self):
    """
    Run a GalSimPhoSim catalog on some data.  Then, generate an ordinary
    PhoSim catalog using the same data.  Verify that the two resulting
    PhoSim catalogs are identical.
    """
    galsim_cat_name = os.path.join(self.dataDir, 'galSimPhoSim_galsim_cat.txt')
    phosim_cat_name = os.path.join(self.dataDir, 'galSimPhoSim_phosim_cat.txt')
    galsim_image_root = os.path.join(self.dataDir, 'galSimPhoSim_images')

    # --- bulges: write both catalogs with headers ---
    db = fileDBObject(self.bulge_name, dtype=self.dtype,
                      runtable='test_bulges', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 55

    gs_cat = GalSimPhoSimGalaxies(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.PSF = SNRdocumentPSF()
    gs_cat.phoSimHeaderMap = {}
    gs_cat.write_catalog(galsim_cat_name)
    # carry the GalSim interpreter forward so all components draw into
    # the same image set
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogSersic2D(db, obs_metadata=self.obs)
    ps_cat.phoSimHeaderMap = {}
    ps_cat.write_catalog(phosim_cat_name)

    # --- disks: append without headers ---
    db = fileDBObject(self.disk_name, dtype=self.dtype,
                      runtable='test_disks', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 155

    gs_cat = GalSimPhoSimGalaxies(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogSersic2D(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    # --- AGN: append without headers ---
    db = fileDBObject(self.agn_name, dtype=self.dtype,
                      runtable='test_agn', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 255

    gs_cat = GalSimPhoSimAgn(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogZPoint(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    # --- stars: append without headers ---
    # NOTE(review): runtable='test_agn' and objectTypeId=255 are reused
    # here for the star table — looks like a copy-paste from the AGN
    # section; confirm this is intentional.
    db = fileDBObject(self.star_name, dtype=self.dtype,
                      runtable='test_agn', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 255

    gs_cat = GalSimPhoSimStars(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')

    ps_cat = PhoSimCatalogPoint(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    written_files = gs_cat.write_images(nameRoot=galsim_image_root)
    self.assertGreater(len(written_files), 0)
    for name in written_files:
        os.unlink(name)

    # The two catalogs must contain exactly the same lines
    # (4 object sections plus a 7-line header).
    with open(galsim_cat_name, 'r') as galsim_input:
        with open(phosim_cat_name, 'r') as phosim_input:
            galsim_lines = galsim_input.readlines()
            phosim_lines = phosim_input.readlines()
            self.assertEqual(len(galsim_lines), len(phosim_lines))
            self.assertEqual(len(galsim_lines), 4*self.n_objects+7)
            for line in galsim_lines:
                self.assertIn(line, phosim_lines)
            for line in phosim_lines:
                self.assertIn(line, galsim_lines)

    if os.path.exists(galsim_cat_name):
        os.unlink(galsim_cat_name)
    if os.path.exists(phosim_cat_name):
        os.unlink(phosim_cat_name)
def test_mixed_stars(self):
    """
    Here we will test the (somewhat absurd) case of a catalog with two
    different bandpasses (lsst_ and cartoon_) in order to verify that
    gamma values are being cached correctly
    """
    lsst_u_band = Bandpass()
    lsst_u_band.readThroughput(os.path.join(getPackageDir('throughputs'),
                                            'baseline', 'total_u.dat'))
    lsst_g_band = Bandpass()
    lsst_g_band.readThroughput(os.path.join(getPackageDir('throughputs'),
                                            'baseline', 'total_g.dat'))

    obs = ObservationMetaData(bandpassName=['c_u', 'c_g', 'u', 'g'],
                              m5=[25.0, 26.0, 15.0, 16.0])
    # make the difference in m5 between the two bandpass systems extreme
    # so that, in the unit test, we can be sure that the correct values
    # are being used for the correct getters

    db_dtype = np.dtype([('id', np.int),
                         ('raJ2000', np.float), ('decJ2000', np.float),
                         ('sedFilename', str, 100), ('magNorm', np.float),
                         ('galacticAv', np.float)])

    inputDir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'testData')
    inputFile = os.path.join(inputDir, 'IndicesTestCatalogStars.txt')
    db = fileDBObject(inputFile, dtype=db_dtype, runtable='test', idColKey='id')
    catName = os.path.join(getPackageDir('sims_catUtils'),
                           'tests', 'scratchSpace', 'cartoonStarCat.txt')
    cat = CartoonStars(db, obs_metadata=obs,
                       column_outputs=['lsst_u', 'lsst_g',
                                       'sigma_lsst_u', 'sigma_lsst_g'])
    cat.write_catalog(catName)

    dtype = np.dtype([(name, np.float) for name in cat._column_outputs])
    controlData = np.genfromtxt(catName, dtype=dtype, delimiter=',')

    if os.path.exists(catName):
        os.unlink(catName)

    db_columns = db.query_columns(['id', 'raJ2000', 'decJ2000',
                                   'sedFilename', 'magNorm', 'galacticAv'])

    sedDir = os.path.join(getPackageDir('sims_sed_library'),
                          'starSED', 'kurucz')

    # Recompute each star's magnitudes and magnitude errors by hand and
    # compare against what the catalog wrote.
    for ix, line in enumerate(next(db_columns)):
        spectrum = Sed()
        spectrum.readSED_flambda(os.path.join(sedDir, line[3]))
        fnorm = spectrum.calcFluxNorm(line[4], self.normband)
        spectrum.multiplyFluxNorm(fnorm)
        a_x, b_x = spectrum.setupCCMab()
        spectrum.addCCMDust(a_x, b_x, A_v=line[5])
        umag = spectrum.calcMag(self.uband)
        self.assertAlmostEqual(umag, controlData['cartoon_u'][ix], 3)
        gmag = spectrum.calcMag(self.gband)
        self.assertAlmostEqual(gmag, controlData['cartoon_g'][ix], 3)
        lsst_umag = spectrum.calcMag(lsst_u_band)
        self.assertAlmostEqual(lsst_umag, controlData['lsst_u'][ix], 3)
        lsst_gmag = spectrum.calcMag(lsst_g_band)
        self.assertAlmostEqual(lsst_gmag, controlData['lsst_g'][ix], 3)

        # cartoon bandpass errors use the c_u/c_g m5 values...
        umagError, gamma = calcMagError_m5(umag, self.uband,
                                           obs.m5['c_u'],
                                           PhotometricParameters())
        gmagError, gamma = calcMagError_m5(gmag, self.gband,
                                           obs.m5['c_g'],
                                           PhotometricParameters())
        self.assertAlmostEqual(umagError, controlData['sigma_cartoon_u'][ix], 3)
        self.assertAlmostEqual(gmagError, controlData['sigma_cartoon_g'][ix], 3)

        # ...while lsst bandpass errors use the u/g m5 values.
        lsst_umagError, gamma = calcMagError_m5(lsst_umag, lsst_u_band,
                                                obs.m5['u'],
                                                PhotometricParameters())
        lsst_gmagError, gamma = calcMagError_m5(lsst_gmag, lsst_g_band,
                                                obs.m5['g'],
                                                PhotometricParameters())
        self.assertAlmostEqual(lsst_umagError,
                               controlData['sigma_lsst_u'][ix], 3)
        self.assertAlmostEqual(lsst_gmagError,
                               controlData['sigma_lsst_g'][ix], 3)
        # The extreme m5 difference guarantees the two error sets are
        # distinguishable; if gamma caching mixed them up, these would fail.
        self.assertGreater(np.abs(lsst_umagError-umagError), 0.01)
        self.assertGreater(np.abs(lsst_gmagError-gmagError), 0.01)
def testGalSimPhoSimCat(self):
    """
    Run a GalSimPhoSim catalog on some data.  Then, generate an ordinary
    PhoSim catalog using the same data.  Verify that the two resulting
    PhoSim catalogs are identical.
    """
    galsim_cat_name = os.path.join(self.dataDir, 'galSimPhoSim_galsim_cat.txt')
    phosim_cat_name = os.path.join(self.dataDir, 'galSimPhoSim_phosim_cat.txt')
    galsim_image_root = os.path.join(self.dataDir, 'galSimPhoSim_images')

    # --- bulges: write both catalogs with headers ---
    db = fileDBObject(self.bulge_name, dtype=self.dtype,
                      runtable='test_bulges', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 55

    gs_cat = GalSimPhoSimGalaxies(db, obs_metadata=self.obs)
    # only the first catalog sets up the camera wrapper; subsequent
    # catalogs inherit the interpreter via copyGalSimInterpreter
    gs_cat.camera_wrapper = GalSimCameraWrapper(self.camera)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.PSF = SNRdocumentPSF()
    gs_cat.phoSimHeaderMap = {}
    gs_cat.write_catalog(galsim_cat_name)
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogSersic2D(db, obs_metadata=self.obs)
    ps_cat.phoSimHeaderMap = {}
    ps_cat.write_catalog(phosim_cat_name)

    # --- disks: append without headers ---
    db = fileDBObject(self.disk_name, dtype=self.dtype,
                      runtable='test_disks', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 155

    gs_cat = GalSimPhoSimGalaxies(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogSersic2D(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    # --- AGN: append without headers ---
    db = fileDBObject(self.agn_name, dtype=self.dtype,
                      runtable='test_agn', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 255

    gs_cat = GalSimPhoSimAgn(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')
    gs_cat_0 = gs_cat

    ps_cat = PhoSimCatalogZPoint(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    # --- stars: append without headers ---
    # NOTE(review): runtable='test_agn' and objectTypeId=255 are reused
    # here for the star table — looks like a copy-paste from the AGN
    # section; confirm this is intentional.
    db = fileDBObject(self.star_name, dtype=self.dtype,
                      runtable='test_agn', idColKey='id')
    db.raColName = 'ra_deg'
    db.decColName = 'dec_deg'
    db.objectTypeId = 255

    gs_cat = GalSimPhoSimStars(db, obs_metadata=self.obs)
    gs_cat.bandpassNames = self.obs.bandpass
    gs_cat.copyGalSimInterpreter(gs_cat_0)
    gs_cat.write_catalog(galsim_cat_name, write_header=False,
                         write_mode='a')

    ps_cat = PhoSimCatalogPoint(db, obs_metadata=self.obs)
    ps_cat.write_catalog(phosim_cat_name, write_header=False,
                         write_mode='a')

    written_files = gs_cat.write_images(nameRoot=galsim_image_root)
    self.assertGreater(len(written_files), 0)
    for name in written_files:
        os.unlink(name)

    # The two catalogs must contain exactly the same lines
    # (4 object sections plus a 7-line header).
    with open(galsim_cat_name, 'r') as galsim_input:
        with open(phosim_cat_name, 'r') as phosim_input:
            galsim_lines = galsim_input.readlines()
            phosim_lines = phosim_input.readlines()
            self.assertEqual(len(galsim_lines), len(phosim_lines))
            self.assertEqual(len(galsim_lines), 4 * self.n_objects + 7)
            for line in galsim_lines:
                self.assertIn(line, phosim_lines)
            for line in phosim_lines:
                self.assertIn(line, galsim_lines)

    if os.path.exists(galsim_cat_name):
        os.unlink(galsim_cat_name)
    if os.path.exists(phosim_cat_name):
        os.unlink(phosim_cat_name)
def test_different_cameras(self):
    """
    Verify that CameraCoordsLSST (native LSST camera) produces exactly the
    same (id, chipName) results as the generic CameraCoords mixin configured
    with LsstSimMapper().camera.
    """
    rng = np.random.RandomState(6512)
    pointing_ra = 15.0
    pointing_dec = 13.0

    # Scatter 100 stars within 2 degrees of the pointing, with small
    # parallaxes, proper motions, and radial velocities.
    n_obj = 100
    ra_list = pointing_ra + 2.0 * rng.random_sample(n_obj)
    dec_list = pointing_dec + 2.0 * rng.random_sample(n_obj)
    px_list = radiansFromArcsec(0.005) * rng.random_sample(n_obj)
    px_list += radiansFromArcsec(0.001)  # keep parallax strictly positive
    mura_list = radiansFromArcsec(0.005) * rng.random_sample(n_obj)
    mudec_list = radiansFromArcsec(0.005) * rng.random_sample(n_obj)
    vrad_list = 100.0 * rng.random_sample(n_obj)

    with lsst.utils.tests.getTempFilePath('.txt') as db_text_file:
        # Write the sources to a text file so fileDBObject can ingest
        # them into an in-memory database.
        with open(db_text_file, 'w') as out_file:
            for ix, (rdeg, ddeg, rrad, drad, px, mura, mudec, vrad) in \
                enumerate(zip(ra_list, dec_list,
                              np.radians(ra_list), np.radians(dec_list),
                              px_list, mura_list, mudec_list, vrad_list)):

                out_file.write(
                    '%d %e %e %e %e %e %e %e %e\n'
                    % (ix, rdeg, ddeg, rrad, drad, px, mura, mudec, vrad))

        dtype = np.dtype([('id', int), ('raDeg', float), ('decDeg', float),
                          ('raJ2000', float), ('decJ2000', float),
                          ('parallax', float),
                          ('properMotionRa', float),
                          ('properMotionDec', float),
                          ('radialVelocity', float)])

        db = fileDBObject(db_text_file, dtype=dtype, idColKey='id')
        db.raColName = 'raDeg'
        db.decColName = 'decDeg'

        # Control catalog: generic CameraCoords with an explicitly
        # supplied LSST camera object.
        class CameraCoordsCatalog(AstrometryStars, CameraCoords,
                                  InstanceCatalog):
            camera = LsstSimMapper().camera
            column_outputs = ['id', 'chipName']

        # Test catalog: CameraCoordsLSST, which should pick up the same
        # camera implicitly.
        class CameraCoordsLSSTCatalog(AstrometryStars, CameraCoordsLSST,
                                      InstanceCatalog):
            column_outputs = ['id', 'chipName']

        obs = ObservationMetaData(pointingRA=pointing_ra,
                                  pointingDec=pointing_dec,
                                  boundLength=1.75, boundType='circle',
                                  rotSkyPos=23.0, mjd=59580.0)

        control_cat = CameraCoordsCatalog(db, obs_metadata=obs)
        test_cat = CameraCoordsLSSTCatalog(db, obs_metadata=obs)

        control_line_list = []
        none_chips = 0
        for line in control_cat.iter_catalog():
            if line[1] is None:
                none_chips += 1
            control_line_list.append(line)
        self.assertGreater(len(control_line_list), 0)
        # Sanity check: most sources should actually land on a chip.
        self.assertLess(none_chips, len(control_line_list) / 2)

        # Every line of the test catalog must also appear in the control
        # catalog, and the two must have the same length.
        line_ct = 0
        for line in test_cat.iter_catalog():
            line_ct += 1
            self.assertIn(line, control_line_list)
        self.assertEqual(line_ct, len(control_line_list))
def setUpClass(cls):
    """
    Create a fake database of 20 AGN-hosting galaxies (disk + bulge +
    variable AGN component) and store it in cls.agn_db.  Also record
    the path to the OpSim database in cls.opsimDb.
    """
    rng = np.random.RandomState(119)
    n_galaxies = 20

    sed_dir = os.path.join(getPackageDir("sims_sed_library"), "galaxySED")
    list_of_seds = os.listdir(sed_dir)

    # Random SEDs, extinction, redshifts, AGN structure-function
    # parameters, positions, and magnitude normalizations.
    disk_sed_dexes = rng.randint(0, len(list_of_seds), size=n_galaxies)
    bulge_sed_dexes = rng.randint(0, len(list_of_seds), size=n_galaxies)

    avBulge = rng.random_sample(n_galaxies) * 0.3 + 0.1
    avDisk = rng.random_sample(n_galaxies) * 0.3 + 0.1

    mjdList = rng.random_sample(n_galaxies) * 10.0 + 49330.0
    redshiftList = rng.random_sample(n_galaxies) * 1.5 + 0.01

    tauList = rng.random_sample(n_galaxies) * 1.0 + 1.0
    sfuList = rng.random_sample(n_galaxies) * 2.0 + 1.0
    sfgList = rng.random_sample(n_galaxies) * 2.0 + 1.0
    sfrList = rng.random_sample(n_galaxies) * 2.0 + 1.0
    sfiList = rng.random_sample(n_galaxies) * 2.0 + 1.0
    sfzList = rng.random_sample(n_galaxies) * 2.0 + 1.0
    sfyList = rng.random_sample(n_galaxies) * 2.0 + 1.0

    raList = rng.random_sample(n_galaxies) * 7.0 + 78.0
    decList = rng.random_sample(n_galaxies) * 4.0 - 69.0

    normDisk = rng.random_sample(n_galaxies) * 5.0 + 20.0
    normBulge = rng.random_sample(n_galaxies) * 5.0 + 20.0
    normAgn = rng.random_sample(n_galaxies) * 5.0 + 20.0

    with lsst.utils.tests.getTempFilePath('.txt') as txt_cat_name:
        # Write a semicolon-delimited text catalog that fileDBObject
        # will ingest into a sqlite database.
        with open(txt_cat_name, "w") as output_file:
            for ix in range(n_galaxies):
                varParam = {
                    'varMethodName': 'applyAgn',
                    'pars': {
                        'agn_tau': tauList[ix],
                        'agn_sfu': sfuList[ix],
                        'agn_sfg': sfgList[ix],
                        'agn_sfr': sfrList[ix],
                        'agn_sfi': sfiList[ix],
                        'agn_sfz': sfzList[ix],
                        'agn_sfy': sfyList[ix],
                        't0_mjd': mjdList[ix],
                        'seed': rng.randint(0, 200000)
                    }
                }
                paramStr = json.dumps(varParam)

                output_file.write(
                    "%d;%f;%f;" % (ix, raList[ix], decList[ix]) +
                    "%f;%f;" % (np.radians(raList[ix]),
                                np.radians(decList[ix])) +
                    "%f;" % (redshiftList[ix]) +
                    "%s;%f;%f;" % (list_of_seds[disk_sed_dexes[ix]],
                                   avDisk[ix], normDisk[ix]) +
                    "%s;%f;%f;" % (list_of_seds[bulge_sed_dexes[ix]],
                                   avBulge[ix], normBulge[ix]) +
                    "agn.spec;%s;%f\n" % (paramStr, normAgn[ix]))

        # NOTE: np.int/np.float were removed in NumPy 1.24; the plain
        # Python builtins are exactly what those aliases meant.
        dtype = np.dtype([('galid', int),
                          ('raDeg', float), ('decDeg', float),
                          ('raJ2000', float), ('decJ2000', float),
                          ('redshift', float),
                          ('sedFilenameDisk', str, 300),
                          ('internalAvDisk', float),
                          ('magNormDisk', float),
                          ('sedFilenameBulge', str, 300),
                          ('internalAvBulge', float),
                          ('magNormBulge', float),
                          ('sedFilenameAgn', str, 300),
                          ('varParamStr', str, 600),
                          ('magNormAgn', float)])

        cls.agn_db = fileDBObject(txt_cat_name, delimiter=';',
                                  runtable='test', dtype=dtype,
                                  idColKey='galid')

    cls.agn_db.raColName = 'raDeg'
    cls.agn_db.decColName = 'decDeg'
    cls.agn_db.objectTypeId = 112

    # what follows is a hack to deal with the fact that
    # our varParamStr values are longer than 256 characters
    # which is the default maximum length that a
    # CatalogDBObject expects a string to be
    #
    cls.agn_db.dbTypeMap['STRING'] = (str, 600)
    cls.agn_db.columns = None
    cls.agn_db._make_default_columns()
    cls.agn_db._make_column_map()
    cls.agn_db._make_type_map()

    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")
def setUpClass(cls):
    """
    Create a fake database of 20 AGN-hosting galaxies (disk + bulge +
    variable AGN component) backed by a text catalog at
    cls.txt_cat_name; store the database in cls.agn_db and the OpSim
    database path in cls.opsimDb.
    """
    rng = np.random.RandomState(119)

    # tempfile.mktemp is deprecated and race-prone; mkstemp creates
    # the file atomically.  We only need the path, so close the fd.
    fd, cls.txt_cat_name = tempfile.mkstemp(prefix='agn_lc_cat',
                                            suffix='.txt',
                                            dir=ROOT)
    os.close(fd)

    n_galaxies = 20

    sed_dir = os.path.join(getPackageDir("sims_sed_library"), "galaxySED")
    list_of_seds = os.listdir(sed_dir)

    disk_sed_dexes = rng.randint(0, len(list_of_seds), size=n_galaxies)
    bulge_sed_dexes = rng.randint(0, len(list_of_seds), size=n_galaxies)

    avBulge = rng.random_sample(n_galaxies)*0.3+0.1
    avDisk = rng.random_sample(n_galaxies)*0.3+0.1

    mjdList = rng.random_sample(n_galaxies)*10.0+49330.0
    redshiftList = rng.random_sample(n_galaxies)*1.5+0.01

    # AGN damped-random-walk structure function parameters per band.
    tauList = rng.random_sample(n_galaxies)*1.0+1.0
    sfuList = rng.random_sample(n_galaxies)*2.0+1.0
    sfgList = rng.random_sample(n_galaxies)*2.0+1.0
    sfrList = rng.random_sample(n_galaxies)*2.0+1.0
    sfiList = rng.random_sample(n_galaxies)*2.0+1.0
    sfzList = rng.random_sample(n_galaxies)*2.0+1.0
    sfyList = rng.random_sample(n_galaxies)*2.0+1.0

    raList = rng.random_sample(n_galaxies)*7.0+78.0
    decList = rng.random_sample(n_galaxies)*4.0-69.0

    normDisk = rng.random_sample(n_galaxies)*5.0+20.0
    normBulge = rng.random_sample(n_galaxies)*5.0+20.0
    normAgn = rng.random_sample(n_galaxies)*5.0+20.0

    # Write a semicolon-delimited text catalog for fileDBObject.
    with open(cls.txt_cat_name, "w") as output_file:
        for ix in range(n_galaxies):
            varParam = {'varMethodName': 'applyAgn',
                        'pars': {'agn_tau': tauList[ix],
                                 'agn_sfu': sfuList[ix],
                                 'agn_sfg': sfgList[ix],
                                 'agn_sfr': sfrList[ix],
                                 'agn_sfi': sfiList[ix],
                                 'agn_sfz': sfzList[ix],
                                 'agn_sfy': sfyList[ix],
                                 't0_mjd': mjdList[ix],
                                 'seed': rng.randint(0, 200000)}}

            paramStr = json.dumps(varParam)

            output_file.write("%d;%f;%f;" % (ix, raList[ix], decList[ix])
                              + "%f;%f;" % (np.radians(raList[ix]),
                                            np.radians(decList[ix]))
                              + "%f;" % (redshiftList[ix])
                              + "%s;%f;%f;" % (list_of_seds[disk_sed_dexes[ix]],
                                               avDisk[ix], normDisk[ix])
                              + "%s;%f;%f;" % (list_of_seds[bulge_sed_dexes[ix]],
                                               avBulge[ix], normBulge[ix])
                              + "agn.spec;%s;%f\n" % (paramStr, normAgn[ix]))

    # NOTE: np.int/np.float were removed in NumPy 1.24; the plain
    # Python builtins are exactly what those aliases meant.
    dtype = np.dtype([
                     ('galid', int),
                     ('raDeg', float), ('decDeg', float),
                     ('raJ2000', float), ('decJ2000', float),
                     ('redshift', float),
                     ('sedFilenameDisk', str, 300),
                     ('internalAvDisk', float),
                     ('magNormDisk', float),
                     ('sedFilenameBulge', str, 300),
                     ('internalAvBulge', float),
                     ('magNormBulge', float),
                     ('sedFilenameAgn', str, 300),
                     ('varParamStr', str, 600),
                     ('magNormAgn', float)
                     ])

    cls.agn_db = fileDBObject(cls.txt_cat_name, delimiter=';',
                              runtable='test', dtype=dtype,
                              idColKey='galid')

    cls.agn_db.raColName = 'raDeg'
    cls.agn_db.decColName = 'decDeg'
    cls.agn_db.objectTypeId = 112

    # what follows is a hack to deal with the fact that
    # our varParamStr values are longer than 256 characters
    # which is the default maximum length that a
    # CatalogDBObject expects a string to be
    #
    cls.agn_db.dbTypeMap['STRING'] = (str, 600)
    cls.agn_db.columns = None
    cls.agn_db._make_default_columns()
    cls.agn_db._make_column_map()
    cls.agn_db._make_type_map()

    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")
def setUpClass(cls):
    """
    Write two random text tables (table1: ra/dec/mag plus deltas;
    table2: ra/dec/mag), keep in-memory control copies in
    cls.table1Control / cls.table2Control, and ingest both into the
    sqlite database named by cartoonDBbase().database.
    """
    cls.baseDir = SCRATCH_DIR
    cls.table1FileName = os.path.join(cls.baseDir, 'compound_table1.txt')
    cls.table2FileName = os.path.join(cls.baseDir, 'compound_table2.txt')

    if os.path.exists(cls.table1FileName):
        os.unlink(cls.table1FileName)
    if os.path.exists(cls.table2FileName):
        os.unlink(cls.table2FileName)

    # NOTE: np.int/np.float were removed in NumPy 1.24; the plain
    # Python builtins are exactly what those aliases meant.
    dtype1 = np.dtype([('ra', float), ('dec', float),
                       ('mag', float), ('dmag', float),
                       ('dra', float), ('ddec', float)])

    dbDtype1 = np.dtype([('id', int),
                         ('ra', float), ('dec', float),
                         ('mag', float), ('dmag', float),
                         ('dra', float), ('ddec', float)])

    nPts = 100
    # A seeded RandomState reproduces the exact stream that
    # np.random.seed(42) + np.random.random_sample produced, without
    # mutating NumPy's global RNG state.
    rng = np.random.RandomState(42)
    raList = rng.random_sample(nPts) * 360.0
    decList = rng.random_sample(nPts) * 180.0 - 90.0
    magList = rng.random_sample(nPts) * 10.0 + 15.0
    dmagList = rng.random_sample(nPts) * 10.0 - 5.0
    draList = rng.random_sample(nPts) * 5.0 - 2.5
    ddecList = rng.random_sample(nPts) * (-2.0) - 4.0

    cls.table1Control = np.rec.fromrecords(
        [(r, d, mm, dm, dr, dd) for r, d, mm, dm, dr, dd in zip(
            raList, decList, magList, dmagList, draList, ddecList)],
        dtype=dtype1)

    with open(cls.table1FileName, 'w') as output:
        output.write("# id ra dec mag dmag dra ddec\n")
        for ix, (r, d, mm, dm, dr, dd) in \
                enumerate(zip(raList, decList, magList,
                              dmagList, draList, ddecList)):
            output.write('%d %.12f %.12f %.12f %.12f %.12f %.12f\n'
                         % (ix, r, d, mm, dm, dr, dd))

    dtype2 = np.dtype([('ra', float), ('dec', float), ('mag', float)])

    dbDtype2 = np.dtype([('id', int),
                         ('ra', float), ('dec', float), ('mag', float)])

    ra2List = rng.random_sample(nPts) * 360.0
    dec2List = rng.random_sample(nPts) * 180.0 - 90.0
    mag2List = rng.random_sample(nPts) * 10 + 18.0

    cls.table2Control = np.rec.fromrecords(
        [(r, d, m) for r, d, m in zip(ra2List, dec2List, mag2List)],
        dtype=dtype2)

    with open(cls.table2FileName, 'w') as output:
        output.write('# id ra dec mag\n')
        for ix, (r, d, m) in enumerate(zip(ra2List, dec2List, mag2List)):
            output.write('%d %.12f %.12f %.12f\n' % (ix, r, d, m))

    cls.dbName = cartoonDBbase().database
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)

    fileDBObject(cls.table1FileName, runtable='table1',
                 database=cls.dbName, dtype=dbDtype1, idColKey='id')
    fileDBObject(cls.table2FileName, runtable='table2',
                 database=cls.dbName, dtype=dbDtype2, idColKey='id')
def setUpClass(cls):
    """
    Create a fake catalog of RR Lyrae stars.
    Store it in cls.stellar_db
    """
    cls.scratchDir = tempfile.mkdtemp(dir=ROOT, prefix='StellarLigghtCurveTest-')

    rng = np.random.RandomState(88)
    n_stars = 10000

    sed_dir = os.path.join(getPackageDir("sims_sed_library"))
    sed_dir = os.path.join(sed_dir, "starSED", "kurucz")
    list_of_seds = os.listdir(sed_dir)

    # Pool of 'ab'-type RR Lyrae light-curve templates.
    lc_dir = os.path.join(getPackageDir("sims_sed_library"), "rrly_lc")
    lc_dir = os.path.join(lc_dir, "RRab")
    list_of_lc = ['rrly_lc/RRab/%s' % ww
                  for ww in os.listdir(lc_dir) if "per.txt" in ww]

    # NOTE: np.int/np.float were removed in NumPy 1.24; the plain
    # Python builtins are exactly what those aliases meant.
    cls.dtype = np.dtype([('id', int),
                          ('raDeg', float), ('decDeg', float),
                          ('raJ2000', float), ('decJ2000', float),
                          ('magNorm', float),
                          ('galacticAv', float),
                          ('sedFilename', str, 300),
                          ('varParamStr', str, 300),
                          ('parallax', float),
                          ('ebv', float)])

    # write the catalog as a text file to be ingested with fileDBObject
    cls.txt_name = os.path.join(cls.scratchDir, "stellar_lc_catalog.txt")
    with open(cls.txt_name, "w") as output_file:
        sed_dex = rng.randint(0, len(list_of_seds), size=n_stars)
        lc_dex = rng.randint(0, len(list_of_lc), size=n_stars)
        mjd0 = rng.random_sample(n_stars) * 10000.0 + 40000.0
        raList = rng.random_sample(n_stars) * 360.0
        decList = -90.0 + rng.random_sample(n_stars) * 120.0
        magNormList = rng.random_sample(n_stars) * 3.0 + 14.0
        AvList = rng.random_sample(n_stars) * 0.2 + 0.1
        pxList = rng.random_sample(n_stars) * 0.1
        for ix in range(n_stars):
            varparams = {'varMethodName': 'applyRRly',
                         'pars': {'tStartMjd': mjd0[ix],
                                  'filename': list_of_lc[lc_dex[ix]]}}
            varparamstr = json.dumps(varparams)
            # ebv column is Av/3.1 (standard R_V for the Milky Way)
            output_file.write("%d;%lf;%lf;%lf;%lf;%lf;%lf;%s;%s;%lf;%lf\n"
                              % (ix, raList[ix], decList[ix],
                                 np.radians(raList[ix]),
                                 np.radians(decList[ix]),
                                 magNormList[ix], AvList[ix],
                                 list_of_seds[sed_dex[ix]],
                                 varparamstr, pxList[ix],
                                 AvList[ix] / 3.1))

    cls.stellar_db = fileDBObject(cls.txt_name, delimiter=';',
                                  runtable='test', dtype=cls.dtype,
                                  idColKey='id')

    cls.stellar_db.raColName = 'raDeg'
    cls.stellar_db.decColName = 'decDeg'
    cls.stellar_db.objectTypeId = 32

    cls.opsimDb = os.path.join(getPackageDir("sims_data"), "OpSimData")
    cls.opsimDb = os.path.join(cls.opsimDb, "opsimblitz1_1133_sqlite.db")