def testObsMetaDataBounds(self):
    """
    Make sure that the bound specifications (i.e. a circle or a box on the sky)
    are correctly passed through to the resulting ObservationMetaData
    """
    gen = ObservationMetaDataGenerator()

    # Test a circle with a specified radius
    results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916),
                                         telescopeFilter='i',
                                         boundLength=0.9)
    ct = 0
    for obs_metadata in results:
        self.assertTrue(isinstance(obs_metadata.bounds, CircleBounds))

        # include some wiggle room, in case ObservationMetaData needs to
        # adjust the boundLength to accommodate the transformation between
        # ICRS and observed coordinates
        self.assertGreaterEqual(obs_metadata.bounds.radiusdeg, 0.9)
        self.assertLess(obs_metadata.bounds.radiusdeg, 0.95)

        self.assertAlmostEqual(obs_metadata.bounds.RA,
                               obs_metadata.phoSimMetaData['pointingRA'][0], 5)
        self.assertAlmostEqual(obs_metadata.bounds.DEC,
                               obs_metadata.phoSimMetaData['pointingDec'][0], 5)
        ct += 1

    # Make sure that some ObservationMetaData were tested
    self.assertGreater(ct, 0)

    boundLengthList = [1.2, (1.2, 0.6)]
    for boundLength in boundLengthList:
        results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916),
                                             telescopeFilter='i',
                                             boundType='box',
                                             boundLength=boundLength)

        if hasattr(boundLength, '__len__'):
            dra = boundLength[0]
            ddec = boundLength[1]
        else:
            dra = boundLength
            ddec = boundLength

        ct = 0
        for obs_metadata in results:
            RAdeg = numpy.degrees(obs_metadata.phoSimMetaData['pointingRA'][0])
            DECdeg = numpy.degrees(obs_metadata.phoSimMetaData['pointingDec'][0])
            self.assertTrue(isinstance(obs_metadata.bounds, BoxBounds))

            self.assertAlmostEqual(obs_metadata.bounds.RAminDeg, RAdeg - dra, 10)
            self.assertAlmostEqual(obs_metadata.bounds.RAmaxDeg, RAdeg + dra, 10)
            self.assertAlmostEqual(obs_metadata.bounds.DECminDeg, DECdeg - ddec, 10)
            self.assertAlmostEqual(obs_metadata.bounds.DECmaxDeg, DECdeg + ddec, 10)

            self.assertAlmostEqual(obs_metadata.bounds.RA,
                                   obs_metadata.phoSimMetaData['pointingRA'][0], 5)
            self.assertAlmostEqual(obs_metadata.bounds.DEC,
                                   obs_metadata.phoSimMetaData['pointingDec'][0], 5)
            ct += 1

        # Make sure that some ObservationMetaData were tested
        self.assertGreater(ct, 0)
def test_multiband_light_curves(self): """ Check that multi-band light curves are returned correctly. """ raRange = (78.0, 82.0) decRange = (-69.0, -65.0) bandpass = ('r', 'g') gen = StellarLightCurveGenerator(self.stellar_db, self.opsimDb) pointings = gen.get_pointings(raRange, decRange, bandpass=bandpass) lc_dict, truth_info = gen.light_curves_from_pointings(pointings) self.assertGreater(len(lc_dict), 2) obs_gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite') control_pointings_r = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter='r', boundLength=1.75) control_pointings_g = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter='g', boundLength=1.75) self.assertGreater(len(control_pointings_g), 0) self.assertGreater(len(control_pointings_r), 0) ct = 0 for obs in control_pointings_r: cat = stellarControlCatalog(self.stellar_db, obs_metadata=obs) for star_obj in cat.iter_catalog(): ct += 1 lc = lc_dict[star_obj[0]]['r'] dex = np.argmin(np.abs(lc['mjd']-obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex]-obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex]-star_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex]-star_obj[4]), 1.0e-7) for obs in control_pointings_g: cat = stellarControlCatalog(self.stellar_db, obs_metadata=obs) for star_obj in cat.iter_catalog(): ct += 1 lc = lc_dict[star_obj[0]]['g'] dex = np.argmin(np.abs(lc['mjd']-obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex]-obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex]-star_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex]-star_obj[4]), 1.0e-7) # Verify that the same number of objects and observations were found in the # catalogs and the LightCurveGenerator output total_ct = 0 for obj_name in lc_dict: for bandpass in lc_dict[obj_name]: total_ct += len(lc_dict[obj_name][bandpass]['mjd']) self.assertEqual(ct, total_ct)
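# A minimal sketch of the light-curve workflow exercised by the test above,
# assuming stellar_db and opsim_db are already-constructed CatSim and OpSim
# database connections like the ones used in the test fixture.
gen = StellarLightCurveGenerator(stellar_db, opsim_db)
pointings = gen.get_pointings((78.0, 82.0), (-69.0, -65.0), bandpass=('r', 'g'))
lc_dict, truth_info = gen.light_curves_from_pointings(pointings)
for obj_id, bands in lc_dict.items():
    for band_name, lc in bands.items():
        # each light curve carries parallel 'mjd', 'mag', and 'error' arrays
        print(obj_id, band_name, len(lc['mjd']))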
def test_sne_multiband_light_curves(self): """ Generate some super nova light curves. Verify that they come up with the same magnitudes and uncertainties as supernova catalogs. Use multiband light curves. """ gen = SNIaLightCurveGenerator(self.db, self.opsimDb) raRange = (78.0, 85.0) decRange = (-69.0, -65.0) pointings = gen.get_pointings(raRange, decRange, bandpass=('r', 'z')) gen.sn_universe._midSurveyTime = 49000.0 gen.sn_universe._snFrequency = 0.001 self.assertGreater(len(pointings), 1) lc_dict, truth = gen.light_curves_from_pointings(pointings) self.assertGreater(len(lc_dict), 0) obs_gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite') control_obs_r = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter='r', boundLength=1.75) control_obs_z = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter='z', boundLength=1.75) self.assertGreater(len(control_obs_r), 0) self.assertGreater(len(control_obs_z), 0) ct_r = 0 for obs in control_obs_r: cat = SNIaLightCurveControlCatalog(self.db, obs_metadata=obs) for sn in cat.iter_catalog(): if sn[1] > 0.0: ct_r += 1 lc = lc_dict[sn[0]]['r'] dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['flux'][dex] - sn[1]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex] - sn[2]), 1.0e-7) self.assertGreater(ct_r, 0) ct_z = 0 for obs in control_obs_z: cat = SNIaLightCurveControlCatalog(self.db, obs_metadata=obs) for sn in cat.iter_catalog(): if sn[1] > 0.0: ct_z += 1 lc = lc_dict[sn[0]]['z'] dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['flux'][dex] - sn[1]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex] - sn[2]), 1.0e-7) self.assertGreater(ct_z, 0)
def _read_pointing_info(self, opsim_db):
    try:
        self.ratel = self.eimage[0].header['RATEL']
        self.dectel = self.eimage[0].header['DECTEL']
        self.rotangle = self.eimage[0].header['ROTANGLE']
        return
    except KeyError:
        if opsim_db is None:
            raise RuntimeError("eimage file does not have pointing info. "
                               "Need an opsim db file.")
    # Read from the opsim db.
    # We need an ObservationMetaData object to use the getRotSkyPos
    # function.
    obs_gen = ObservationMetaDataGenerator(database=opsim_db, driver="sqlite")
    obs_md = obs_gen.getObservationMetaData(obsHistID=self.visit,
                                            boundType='circle',
                                            boundLength=0)[0]
    # Extract pointing info from opsim db for desired visit.
    conn = sqlite3.connect(opsim_db)
    query = """select descDitheredRA, descDitheredDec, descDitheredRotTelPos
               from summary where obshistid={}""".format(self.visit)
    curs = conn.execute(query)
    ra, dec, rottelpos = [np.degrees(x) for x in curs][0]
    conn.close()
    self.ratel, self.dectel = ra, dec
    obs_md.pointingRA = ra
    obs_md.pointingDec = dec
    self.rotangle = getRotSkyPos(ra, dec, obs_md, rottelpos)
def test_query(self): """ Use ObservationMetaData to query an OpSim-like database that contains dithering columns. Make sure that the dithering columns get carried over into the OpsimMetaData of the resulting ObservationMetaData. """ gen = ObservationMetaDataGenerator(database=self.fake_db_name, driver='sqlite') obs_list = gen.getObservationMetaData(fieldRA=(0.0, 180.0)) self.assertGreater(len(obs_list), 0) found_list = [] for obs in obs_list: obsid = obs.OpsimMetaData['obsHistID'] control_dict = self.db_control[obsid] self.assertAlmostEqual(obs._pointingRA, control_dict['ra'], 11) self.assertAlmostEqual(obs._pointingDec, control_dict['dec'], 11) self.assertAlmostEqual(obs._rotSkyPos, control_dict['rot'], 11) self.assertAlmostEqual(obs.OpsimMetaData['m5'], control_dict['m5'], 11) self.assertAlmostEqual(obs.OpsimMetaData['raTestDithering'], control_dict['raDith'], 11) self.assertAlmostEqual(obs.OpsimMetaData['decTestDithering'], control_dict['decDith'], 11) self.assertAlmostEqual(obs.mjd.TAI, control_dict['mjd'], 11) self.assertEqual(obs.bandpass, 'g') self.assertGreaterEqual(obs.pointingRA, 0.0) self.assertLessEqual(obs.pointingRA, 180.0) found_list.append(obs.OpsimMetaData['obsHistID']) # check that the entries not returned do, in fact, violate the query for ix in range(len(self.db_control)): if ix not in found_list: self.assertGreater(self.db_control[ix]['ra'], np.radians(180.0))
def make_refcat(opsim_db, obsHistID, boundLength, outfile, catsim_db_info=None,
                chunk_size=20000):
    """
    Create a reference catalog of stars to use for astrometry from the
    CatSim db tables.

    Parameters
    ----------
    opsim_db : str
        OpSim database sqlite file
    obsHistID : int
        Visit number to provide the center of the extraction region.
    boundLength : float
        Radius of the extraction region in units of degrees.
    outfile : str
        Filename for the reference catalog output file.
    catsim_db_info : dict, optional
        Connection information (host, port, database, driver) for the
        CatSim database.  Default: connection info for the UW fatboy server.
    chunk_size : int, optional
        The memory chunk size to pass to InstanceCatalog.write_catalog
    """
    if catsim_db_info is None:
        catsim_db_info = catsim_uw
    generator = ObservationMetaDataGenerator(database=opsim_db, driver='sqlite')
    obs_metadata = generator.getObservationMetaData(obsHistID=obsHistID,
                                                    boundLength=boundLength)[0]
    stars = CatalogDBObject.from_objid('allstars', **catsim_db_info)
    ref_stars = SimulationReference(stars, obs_metadata=obs_metadata)
    ref_stars.write_catalog(outfile, write_mode='w', write_header=True,
                            chunk_size=chunk_size)
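# Hypothetical invocation of make_refcat; the OpSim file, visit number, and
# output filename below are placeholders rather than values from this code.
make_refcat('minion_1016_sqlite.db',         # OpSim sqlite file (assumed path)
            obsHistID=230,                   # example visit supplying the field center
            boundLength=0.3,                 # extraction radius in degrees
            outfile='astrometry_refcat.txt')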
def testCreationOfPhoSimCatalog_3(self): """ Make sure that we can create PhoSim input catalogs using the returned ObservationMetaData. Test that an error is actually raised if we try to build a PhoSim catalog with a v3 header map using a v4 ObservationMetaData """ dbName = tempfile.mktemp(dir=ROOT, prefix='obsMetaDataGeneratorTest-', suffix='.db') makePhoSimTestDB(filename=dbName) bulgeDB = testGalaxyBulgeDBObj(driver='sqlite', database=dbName) opsim_db = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'astro-lsst-01_2014.db') assert os.path.isfile(opsim_db) gen = ObservationMetaDataGenerator(opsim_db, driver='sqlite') results = gen.getObservationMetaData(fieldRA=(70.0, 85.0), telescopeFilter='i') self.assertGreater(len(results), 0) testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0]) testCat.phoSimHeaderMap = DefaultPhoSimHeaderMap with lsst.utils.tests.getTempFilePath('.txt') as catName: with self.assertRaises(RuntimeError): testCat.write_catalog(catName) if os.path.exists(dbName): os.unlink(dbName)
def setUp(self):
    self.obsHistID = 1418971
    obs_gen = ObservationMetaDataGenerator(database=os.environ['OPSIMDB'],
                                           driver='sqlite')
    self.obs_md = obs_gen.getObservationMetaData(obsHistID=self.obsHistID)[0]
    self.outfile = 'phosim_instcat_%i.txt' % self.obsHistID
def testIncompletDB(self): """ Test that if the mock OpSim database does not have all required columns, an exception is raised. """ scratch_dir = os.path.join(getPackageDir('sims_catUtils'), 'tests', 'scratchSpace') opsim_db_name = os.path.join(scratch_dir, 'incomplete_mock_opsim_sqlite.db') if os.path.exists(opsim_db_name): os.unlink(opsim_db_name) conn = sqlite3.connect(opsim_db_name) c = conn.cursor() c.execute('''CREATE TABLE Summary (obsHistID int, expMJD real, ''' '''fieldRA real, filter text)''') conn.commit() rng = np.random.RandomState(77) n_pointings = 100 ra_data = rng.random_sample(n_pointings) * 2.0 * np.pi mjd_data = rng.random_sample(n_pointings) * 1000.0 + 59580.0 filter_dexes = rng.randint(0, 6, n_pointings) bands = ('u', 'g', 'r', 'i', 'z', 'y') filter_data = [] for ii in filter_dexes: filter_data.append(bands[ii]) for ii in range(n_pointings): cmd = '''INSERT INTO Summary VALUES(%i, %f, %f, '%s')''' % \ (ii, mjd_data[ii], ra_data[ii], filter_data[ii]) c.execute(cmd) conn.commit() conn.close() incomplete_obs_gen = ObservationMetaDataGenerator( database=opsim_db_name) with self.assertRaises(RuntimeError) as context: incomplete_obs_gen.getObservationMetaData(telescopeFilter='r') self.assertIn( "ObservationMetaDataGenerator requires that the database", context.exception.args[0]) if os.path.exists(opsim_db_name): os.unlink(opsim_db_name)
def testQueryLimit(self):
    """
    Test that, when we specify a limit on the number of ObservationMetaData
    we want returned, that limit is respected
    """
    gen = ObservationMetaDataGenerator()
    results = gen.getObservationMetaData(fieldRA=(numpy.degrees(1.370916),
                                                  numpy.degrees(1.5348635)),
                                         limit=20)
    self.assertEqual(len(results), 20)
def setUpClass(cls):
    opsimdb = os.path.join(getPackageDir('sims_data'), 'OpSimData',
                           'opsimblitz1_1133_sqlite.db')
    obs_gen = ObservationMetaDataGenerator(opsimdb)
    cls.obs_dict = {}
    for band in 'ugrizy':
        obs_list = obs_gen.getObservationMetaData(telescopeFilter=band,
                                                  limit=10)
        assert len(obs_list) > 0
        cls.obs_dict[band] = obs_list[0]
def _set_obs_md_results(self, opsim_db, fieldRA, fieldDec, boundLength,
                        pickle_file):
    if pickle_file is not None and os.path.isfile(pickle_file):
        # pickle files must be opened in binary mode
        self.obs_md_results = pickle.load(open(pickle_file, "rb"))
    else:
        # Generate the observation metadata from the db file.
        gen = ObservationMetaDataGenerator(database=opsim_db, driver="sqlite")
        self.obs_md_results = gen.getObservationMetaData(
            fieldRA=fieldRA, fieldDec=fieldDec, boundLength=boundLength)
        if pickle_file is not None:
            pickle.dump(self.obs_md_results, open(pickle_file, "wb"))
def testIncompletDB(self): """ Test that if the mock OpSim database does not have all required columns, an exception is raised. """ opsim_db_name = tempfile.mktemp(dir=ROOT, prefix='incomplete_mock_opsim_sqlite-', suffix='.db') conn = sqlite3.connect(opsim_db_name) c = conn.cursor() c.execute('''CREATE TABLE Summary (obsHistID int, expMJD real, ''' '''fieldRA real, filter text)''') conn.commit() rng = np.random.RandomState(77) n_pointings = 100 ra_data = rng.random_sample(n_pointings)*2.0*np.pi mjd_data = rng.random_sample(n_pointings)*1000.0 + 59580.0 filter_dexes = rng.randint(0, 6, n_pointings) bands = ('u', 'g', 'r', 'i', 'z', 'y') filter_data = [] for ii in filter_dexes: filter_data.append(bands[ii]) for ii in range(n_pointings): cmd = '''INSERT INTO Summary VALUES(%i, %f, %f, '%s')''' % \ (ii, mjd_data[ii], ra_data[ii], filter_data[ii]) c.execute(cmd) conn.commit() conn.close() incomplete_obs_gen = ObservationMetaDataGenerator(database=opsim_db_name) with self.assertRaises(RuntimeError) as context: incomplete_obs_gen.getObservationMetaData(telescopeFilter='r') self.assertIn("ObservationMetaDataGenerator requires that the database", context.exception.args[0]) if os.path.exists(opsim_db_name): os.unlink(opsim_db_name)
def testQueryOnFilter(self):
    """
    Test that queries on the filter work.
    """
    gen = ObservationMetaDataGenerator()
    results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916),
                                         telescopeFilter='i')
    ct = 0
    for obs_metadata in results:
        self.assertAlmostEqual(obs_metadata.phoSimMetaData['pointingRA'][0], 1.370916)
        self.assertEqual(obs_metadata.phoSimMetaData['Opsim_filter'][0], 'i')
        ct += 1

    # Make sure that more than zero ObservationMetaData were returned
    self.assertGreater(ct, 0)
def testCreationOfPhoSimCatalog(self): """ Make sure that we can create PhoSim input catalogs using the returned ObservationMetaData. This test will just make sure that all of the expected header entries are there. """ dbName = 'obsMetaDataGeneratorTest.db' catName = 'testPhoSimFromObsMetaDataGenerator.txt' if os.path.exists(dbName): os.unlink(dbName) junk_obs_metadata = makePhoSimTestDB(filename=dbName) bulgeDB = testGalaxyBulge(driver='sqlite', database=dbName) gen = ObservationMetaDataGenerator() results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916),telescopeFilter='i') testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0]) testCat.write_catalog(catName) filterTranslation=['u','g','r','i','z','y'] with open(catName) as inputFile: lines = inputFile.readlines() ix = 0 for control in gen.columnMapping: if control[0] != 'm5' and control[0]!='skyBrightness' and control[0]!='seeing': words = lines[ix].split() self.assertEqual(control[2].replace('pointing', 'Unrefracted_'), words[0]) if control[0] != 'telescopeFilter': if control[4] is not None: value = control[4](float(words[1])) else: value = float(words[1]) self.assertAlmostEqual(value, results[0].phoSimMetaData[control[2]][0], 5) else: self.assertEqual(filterTranslation[int(words[1])],results[0].phoSimMetaData[control[2]][0]) ix += 1 if os.path.exists(catName): os.unlink(catName) if os.path.exists(dbName): os.unlink(dbName)
class OpsimdbInterface(object):
    def __init__(self,
                 opsim_db='/global/projecta/projectdirs/lsst/groups/SSim/DC2/minion_1016_desc_dithered_v4.db'):
        self.conn = sqlite3.connect(opsim_db)
        self.obs_gen = ObservationMetaDataGenerator(database=opsim_db,
                                                    driver='sqlite')
        self._cache = dict()

    def get_obs_md(self, obsHistID):
        if obsHistID not in self._cache:
            self._cache[obsHistID] = self._get_obs_md(obsHistID)
        return self._cache[obsHistID]

    def _get_obs_md(self, obsHistID):
        curs = self.conn.execute(
            'select descDitheredRA, descDitheredDec, descDitheredRotTelPos '
            'from Summary where obsHistID={}'.format(obsHistID))
        data = [x for x in curs][0]
        ra, dec = [x*180./np.pi for x in data[:2]]
        rottelpos = data[2]
        obs_md = self.obs_gen.getObservationMetaData(obsHistID=obsHistID,
                                                     boundType='circle',
                                                     boundLength=0.1)[0]
        obs_md.pointingRA = ra
        obs_md.pointingDec = dec
        obs_md.OpsimMetaData['rotTelPos'] = rottelpos
        obs_md.rotSkyPos = getRotSkyPos(obs_md._pointingRA, obs_md._pointingDec,
                                        obs_md, rottelpos)*180./np.pi
        return obs_md

    def plot_fov(self, obsHistID, radius=2.047):
        obs_md = self.get_obs_md(obsHistID)
        ra, dec = obs_md.pointingRA, obs_md.pointingDec
        phi = np.linspace(0, 2*np.pi, 100)
        radius /= np.cos(dec*np.pi/180.)
        plt.errorbar(radius*np.sin(phi) + ra, radius*np.cos(phi) + dec,
                     fmt='--', label='{} fov'.format(obsHistID))
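# Hypothetical usage of OpsimdbInterface; the visit number is an example value,
# and matplotlib.pyplot must already be imported as plt for plot_fov to work.
opsim = OpsimdbInterface()           # uses the default DC2 dithered minion_1016 db path
obs_md = opsim.get_obs_md(219976)    # ObservationMetaData (cached) for the example visit
opsim.plot_fov(219976)               # draw the field-of-view circle on the current axes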
class DescObsMdGenerator:
    def __init__(self, opsim_db_file):
        self.obs_gen = ObservationMetaDataGenerator(database=opsim_db_file,
                                                    driver='sqlite')
        self.opsim_db_file = opsim_db_file

    def create(self, visit):
        obs_md = self.obs_gen.getObservationMetaData(obsHistID=visit,
                                                     boundType='circle',
                                                     boundLength=0)[0]
        query = f'''select descDitheredRA, descDitheredDec, descDitheredRotTelPos
                    from summary where obsHistID={visit}'''
        with sqlite3.connect(self.opsim_db_file) as conn:
            curs = conn.execute(query)
            ra, dec, rottelpos = [np.degrees(_) for _ in curs][0]
        obs_md.pointingRA = ra
        obs_md.pointingDec = dec
        obs_md.rotSkyPos = getRotSkyPos(ra, dec, obs_md, rottelpos)
        return obs_md
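# Hypothetical usage of DescObsMdGenerator; the OpSim file name and visit
# number are placeholders, not values defined in this snippet.
obs_gen = DescObsMdGenerator('minion_1016_desc_dithered_v4.db')
obs_md = obs_gen.create(219976)
print(obs_md.pointingRA, obs_md.pointingDec, obs_md.rotSkyPos)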
def test_spatial_query(self): """ Test that spatial queries work """ db_dir = os.path.join(getPackageDir('sims_data'), 'OpSimData') assert os.path.isdir(db_dir) db_file = os.path.join(db_dir, 'astro-lsst-01_2014.db') obs_gen = ObservationMetaDataGenerator(db_file) obs_list = obs_gen.getObservationMetaData(fieldRA=(20.0, 40.0), fieldDec=(-30.0, -10.0)) self.assertGreater(len(obs_list), 10) with sqlite3.connect(db_file) as conn: cursor = conn.cursor() query = '''SELECT observationId, fieldRA, fieldDec, observationStartMJD, filter FROM SummaryAllProps WHERE fieldRA BETWEEN 20.0 AND 40.0 AND fieldDec BETWEEN -30.0 AND -10.0 ORDER BY observationId''' control = cursor.execute(query).fetchall() self.assertEqual(len(control), len(obs_list)) for ii in range(len(obs_list)): self.assertEqual(obs_list[ii].OpsimMetaData['observationId'], int(control[ii][0])) self.assertAlmostEqual(obs_list[ii].pointingRA, float(control[ii][1]), 10) self.assertAlmostEqual(obs_list[ii].pointingDec, float(control[ii][2]), 10) self.assertAlmostEqual(obs_list[ii].mjd.TAI, float(control[ii][3]), 7) self.assertEqual(obs_list[ii].bandpass, str(control[ii][4])) self.assertGreaterEqual(obs_list[ii].pointingRA, 20.0) self.assertLessEqual(obs_list[ii].pointingRA, 40.0) self.assertGreaterEqual(obs_list[ii].pointingDec, -30.0) self.assertLessEqual(obs_list[ii].pointingDec, -10.0)
def testQueryExactValues(self): """ Test that ObservationMetaData returned by a query demanding an exact value do, in fact, adhere to that requirement. """ gen = ObservationMetaDataGenerator() bounds = [ ('obsHistID',5973), ('expDate',1220779), ('fieldRA',numpy.degrees(1.370916)), ('fieldDec',numpy.degrees(-0.456238)), ('moonRA',numpy.degrees(2.914132)), ('moonDec',numpy.degrees(0.06305)), ('rotSkyPos',numpy.degrees(3.116656)), ('telescopeFilter','i'), ('rawSeeing',0.728562), ('seeing', 0.88911899999999999), ('sunAlt',numpy.degrees(-0.522905)), ('moonAlt',numpy.degrees(0.099096)), ('dist2Moon',numpy.degrees(1.570307)), ('moonPhase',52.2325), ('expMJD',49367.129396), ('altitude',numpy.degrees(0.781015)), ('azimuth',numpy.degrees(3.470077)), ('visitExpTime',30.0), ('airmass',1.420459), ('m5',22.815249), ('skyBrightness',19.017605)] for ii in range(len(bounds)): tag = bounds[ii][0] if tag != 'telescopeFilter' and tag != 'visitExpTime': name = gen.columnMapping[ii][2] args = {} args[tag] = bounds[ii][1] results = gen.getObservationMetaData(**args) if gen.columnMapping[ii][4] is not None: value = gen.columnMapping[ii][4](bounds[ii][1]) else: value = bounds[ii][1] if name is not None: ct = 0 for obs_metadata in results: self.assertAlmostEqual(value, obs_metadata.phoSimMetaData[name][0],10) ct += 1 #Make sure that we did not choose a value which returns zero ObservationMetaData self.assertGreater(ct, 0) elif tag == 'm5': ct = 0 for obs_metadata in results: self.assertAlmostEqual(value, obs_metadata.m5.values()[0]) ct += 1 self.assertGreater(ct, 0) elif tag == 'seeing': ct = 0 for obs_metadata in results: self.assertAlmostEqual(value, obs_metadata.seeing.values()[0]) ct += 1 self.assertGreater(ct, 0)
def test_agn_light_curves(self): """ Test the AgnLightCurveGenerator by generating some AGN light curves and comparing them to the results obtained by generating a series of InstanceCatalogs containing the same objects at the same MJDs """ raRange = (78.0, 85.0) decRange = (-69.0, -65.0) bandpass = '******' lc_gen = AgnLightCurveGenerator(self.agn_db, self.opsimDb) pointings = lc_gen.get_pointings(raRange, decRange, bandpass=bandpass) for row in pointings: for obs in row: mjd = ModifiedJulianDate(TAI=obs.mjd.TAI-49000.0+59580.0) obs.mjd = mjd test_light_curves, truth_info = lc_gen.light_curves_from_pointings(pointings) self.assertGreater(len(test_light_curves), 2) # make sure we got some light curves for unique_id in test_light_curves: # verify that the sources returned all do vary by making sure that the # np.diff run on the magnitudes reutrns something non-zero self.assertGreater(np.abs(np.diff(test_light_curves[unique_id][bandpass]['mag'])).max(), 0.0) self.assertGreater(len(test_light_curves[unique_id][bandpass]['mjd']), 0) # Now test that specifying a small chunk_size does not change the output # light curves chunk_light_curves, truth_info = lc_gen.light_curves_from_pointings(pointings, chunk_size=1) self.assertGreater(len(chunk_light_curves), 2) for unique_id in test_light_curves: self.assertEqual(len(test_light_curves[unique_id][bandpass]['mjd']), len(chunk_light_curves[unique_id][bandpass]['mjd'])) np.testing.assert_array_equal(test_light_curves[unique_id][bandpass]['mjd'], chunk_light_curves[unique_id][bandpass]['mjd']) np.testing.assert_array_equal(test_light_curves[unique_id][bandpass]['mag'], chunk_light_curves[unique_id][bandpass]['mag']) np.testing.assert_array_equal(test_light_curves[unique_id][bandpass]['error'], chunk_light_curves[unique_id][bandpass]['error']) # Now find all of the ObservationMetaData that were included in our # light curves, generate InstanceCatalogs from them separately, # and verify that the contents of the InstanceCatalogs agree with # the contents of the light curves. gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite') obs_list = gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter=bandpass, boundLength=1.75) for obs in obs_list: mjd = ModifiedJulianDate(TAI=obs.mjd.TAI-49000.0+59580.0) obs.mjd = mjd ct = 0 for obs in obs_list: cat = agnControlCatalog(self.agn_db, obs_metadata=obs) for agn_obj in cat.iter_catalog(): ct += 1 lc = test_light_curves[agn_obj[0]][bandpass] dex = np.argmin(np.abs(lc['mjd']-obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex]-obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex]-agn_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex]-agn_obj[4]), 1.0e-7) # Verify that the catalogs and LightCurveGenerator returned the # same number of observations total_ct = 0 for obj_name in test_light_curves: for band in test_light_curves[obj_name]: total_ct += len(test_light_curves[obj_name][band]['mjd']) self.assertEqual(ct, total_ct)
import os
import copy
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
from lsst.sims.catUtils.exampleCatalogDefinitions import PhoSimCatalogPoint
from lsst.sims.catUtils.exampleCatalogDefinitions import DefaultPhoSimHeaderMap
from lsst.sims.catUtils.baseCatalogModels import StarObj

if __name__ == "__main__":

    db = StarObj(database='LSSTCATSIM', host='fatboy.phys.washington.edu',
                 port=1433, driver='mssql+pymssql')

    opsimdb = os.path.join('/Users', 'danielsf', 'physics', 'lsst_150412',
                           'Development', 'garage', 'OpSimData',
                           'minion_1016_sqlite.db')

    assert os.path.exists(opsimdb)
    obs_gen = ObservationMetaDataGenerator(database=opsimdb)
    obs_list = obs_gen.getObservationMetaData(obsHistID=230)
    obs = obs_list[0]
    obs.boundLength = 0.05

    phosim_header_map = copy.deepcopy(DefaultPhoSimHeaderMap)
    phosim_header_map['rawSeeing'] = ('rawSeeing', None)
    phosim_header_map['FWHMeff'] = ('FWHMeff', None)
    phosim_header_map['FWHMgeom'] = ('FWHMgeom', None)

    cat = PhoSimCatalogPoint(db, obs_metadata=obs)
    cat.phoSimHeaderMap = phosim_header_map
    cat.write_catalog('catalogs/star_catalog.txt', chunk_size=10000)
AperatureRadius = 1.75  # LSST's Actual FoV in degrees
AperatureType = 'circle'  # assumed bound type; a single radius implies a circular region
SearchRegionRA = (-30.0, -20.0)
SearchRegionDec = (-30.0, -20.0)
SearchAirmass = (1.0, 1.5)
DesiredFilter = None

import eups
import os
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
from lsst.sims.catalogs.generation.db import ObservationMetaData
#help(ObservationMetaDataGenerator)

opsimdb = os.path.join(eups.productDir('sims_data'), 'OpSimData', 'enigma_1189_sqlite.db')
gen = ObservationMetaDataGenerator(driver='sqlite', database=opsimdb)
SimObData = gen.getObservationMetaData(boundType=AperatureType,
                                       boundLength=AperatureRadius,
                                       fieldRA=SearchRegionRA,
                                       fieldDec=SearchRegionDec,
                                       airmass=SearchAirmass,
                                       telescopeFilter=DesiredFilter)
NumOfObservations = len(SimObData)
#print SimObData[0].__dict__

from prettytable import PrettyTable

UniquePointings = list({(o.unrefractedRA, o.unrefractedDec) for o in SimObData})
NumOfPointings = len(UniquePointings)
print 'Number of Unique Pointings:', NumOfPointings, '\n'

table2 = PrettyTable(["Pointing RA", "Pointing Dec"])
for x in UniquePointings:
    table2.add_row([x[0], x[1]])
print table2

ObMetaData = [[] for _ in xrange(NumOfPointings)]
def testQueryOnRanges(self): """ Test that ObservationMetaData objects returned by queries of the form min < value < max are, in fact, within that range. Test when querying on both a single and two columns. """ gen = ObservationMetaDataGenerator() #An list containing the bounds of our queries. #The order of the tuples must correspond to the order of #self.columnMapping in ObservationMetaDataGenerator. #This was generated with a separate script which printed #the median and maximum values of all of the quantities #in our test opsim database bounds = [ ('obsHistID',(5973, 7000)), ('fieldRA',(numpy.degrees(1.370916), numpy.degrees(1.40))), ('rawSeeing',(0.728562, 0.9)), ('seeing', (0.7, 0.9)), ('dist2Moon',(numpy.degrees(1.570307), numpy.degrees(1.9))), ('expMJD',(49367.129396, 49370.0)), ('airmass',(1.420459, 1.6)), ('m5',(22.815249, 23.0)), ('skyBrightness',(19.017605, 19.5))] #test querying on a single column for line in bounds: tag = line[0] # find the index of the entry in columnMapping that # corresponds to this bound for ii in range(len(gen.columnMapping)): if gen.columnMapping[ii][0] == tag: break if tag != 'telescopeFilter' and tag != 'visitExpTime': args = {} args[tag] = line[1] results = gen.getObservationMetaData(**args) if tag == 'skyBrightness': ct = 0 for obs_metadata in results: self.assertLess(obs_metadata.skyBrightness, line[1][1]) self.assertGreater(obs_metadata.skyBrightness, line[1][0]) ct += 1 self.assertGreater(ct, 0) elif tag == 'm5': ct = 0 for obs_metadata in results: self.assertLess(obs_metadata.m5[obs_metadata.bandpass], line[1][1]) self.assertGreater(obs_metadata.m5[obs_metadata.bandpass], line[1][0]) ct += 1 self.assertGreater(ct, 0) name = gen.columnMapping[ii][2] if name is not None: if gen.columnMapping[ii][4] is not None: xmin = gen.columnMapping[ii][4](line[1][0]) xmax = gen.columnMapping[ii][4](line[1][1]) else: xmin = line[1][0] xmax = line[1][1] ct = 0 for obs_metadata in results: ct += 1 self.assertLess(obs_metadata.phoSimMetaData[name][0], xmax) self.assertGreater(obs_metadata.phoSimMetaData[name][0], xmin) #make sure that we did not accidentally choose values such that #no ObservationMetaData were ever returned self.assertGreater(ct, 0) #test querying on two columns at once ct = 0 for ix in range(len(bounds)): tag1 = bounds[ix][0] for ii in range(len(gen.columnMapping)): if gen.columnMapping[ii][0] == tag1: break if tag1 != 'telescopeFilter' and tag1 != 'visitExpTime': name1 = gen.columnMapping[ii][2] if gen.columnMapping[ii][4] is not None: xmin = gen.columnMapping[ii][4](bounds[ix][1][0]) xmax = gen.columnMapping[ii][4](bounds[ix][1][1]) else: xmin = bounds[ix][1][0] xmax = bounds[ix][1][1] for jx in range(ii+1, len(bounds)): tag2 = bounds[jx][0] for jj in range(len(gen.columnMapping)): if gen.columnMapping[jj][0] == tag2: break if tag2 != 'telescopeFilter' and tag2 != 'visitExpTime': name2 = gen.columnMapping[jj][2] if gen.columnMapping[jj][4] is not None: ymin = gen.columnMapping[jj][4](bounds[jx][1][0]) ymax = gen.columnMapping[jj][4](bounds[jx][1][1]) else: ymin = bounds[jx][1][0] ymax = bounds[jx][1][1] args = {} args[tag1] = bounds[ix][1] args[tag2] = bounds[jx][1] results = gen.getObservationMetaData(**args) if name1 is not None or name2 is not None: for obs_metadata in results: ct += 1 if name1 is not None: self.assertGreater(obs_metadata.phoSimMetaData[name1][0], xmin) self.assertLess(obs_metadata.phoSimMetaData[name1][0], xmax) if name2 is not None: self.assertGreater(obs_metadata.phoSimMetaData[name2][0], ymin) 
self.assertLess(obs_metadata.phoSimMetaData[name2][0], ymax) #Make sure that we didn't choose values such that no ObservationMetaData were #ever returned self.assertGreater(ct, 0)
def testObsMetaDataBounds(self): """ Make sure that the bound specifications (i.e. a circle or a box on the sky) are correctly passed through to the resulting ObservationMetaData """ gen = ObservationMetaDataGenerator() #Test a cirlce with a specified radius results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916), telescopeFilter='i', boundLength=0.9) ct = 0 for obs_metadata in results: self.assertTrue(isinstance(obs_metadata.bounds,CircleBounds)) self.assertAlmostEqual(obs_metadata.bounds.radiusdeg,0.9,10) self.assertAlmostEqual(obs_metadata.bounds.RA,obs_metadata.phoSimMetaData['Unrefracted_RA'][0],10) self.assertAlmostEqual(obs_metadata.bounds.DEC,obs_metadata.phoSimMetaData['Unrefracted_Dec'][0],10) ct += 1 #Make sure that some ObservationMetaData were tested self.assertTrue(ct>0) #test a square results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916), telescopeFilter='i', boundType='box', boundLength=1.2) ct = 0 for obs_metadata in results: RAdeg = numpy.degrees(obs_metadata.phoSimMetaData['Unrefracted_RA'][0]) DECdeg = numpy.degrees(obs_metadata.phoSimMetaData['Unrefracted_Dec'][0]) self.assertTrue(isinstance(obs_metadata.bounds,BoxBounds)) self.assertAlmostEqual(obs_metadata.bounds.RAminDeg,RAdeg-1.2,10) self.assertAlmostEqual(obs_metadata.bounds.RAmaxDeg,RAdeg+1.2,10) self.assertAlmostEqual(obs_metadata.bounds.DECminDeg,DECdeg-1.2,10) self.assertAlmostEqual(obs_metadata.bounds.DECmaxDeg,DECdeg+1.2,10) self.assertAlmostEqual(obs_metadata.bounds.RA,obs_metadata.phoSimMetaData['Unrefracted_RA'][0],10) self.assertAlmostEqual(obs_metadata.bounds.DEC,obs_metadata.phoSimMetaData['Unrefracted_Dec'][0],10) ct += 1 #Make sure that some ObservationMetaData were tested self.assertTrue(ct>0) #test a rectangle results = gen.getObservationMetaData(fieldRA=numpy.degrees(1.370916), telescopeFilter='i', boundType='box', boundLength=(1.2,0.6)) ct = 0 for obs_metadata in results: RAdeg = numpy.degrees(obs_metadata.phoSimMetaData['Unrefracted_RA'][0]) DECdeg = numpy.degrees(obs_metadata.phoSimMetaData['Unrefracted_Dec'][0]) self.assertTrue(isinstance(obs_metadata.bounds,BoxBounds)) self.assertAlmostEqual(obs_metadata.bounds.RAminDeg,RAdeg-1.2,10) self.assertAlmostEqual(obs_metadata.bounds.RAmaxDeg,RAdeg+1.2,10) self.assertAlmostEqual(obs_metadata.bounds.DECminDeg,DECdeg-0.6,10) self.assertAlmostEqual(obs_metadata.bounds.DECmaxDeg,DECdeg+0.6,10) self.assertAlmostEqual(obs_metadata.bounds.RA,obs_metadata.phoSimMetaData['Unrefracted_RA'][0],10) self.assertAlmostEqual(obs_metadata.bounds.DEC,obs_metadata.phoSimMetaData['Unrefracted_Dec'][0],10) ct += 1 #Make sure that some ObservationMetaData were tested self.assertTrue(ct>0)
def setUpClass(cls): # Set directory where scratch work will be done cls.scratchDir = tempfile.mkdtemp(dir=ROOT, prefix='scratchSpace-') # ObsMetaData instance with spatial window within which we will # put galaxies in a fake galaxy catalog cls.obsMetaDataforCat = ObservationMetaData(boundType='circle', boundLength=np.degrees(0.25), pointingRA=np.degrees(0.13), pointingDec=np.degrees(-1.2), bandpassName=['r'], mjd=49350.) # Randomly generate self.size Galaxy positions within the spatial window # of obsMetaDataforCat cls.dbname = os.path.join(cls.scratchDir, 'galcat.db') cls.size = 1000 cls.GalaxyPositionSamps = sample_obsmetadata(obsmetadata=cls.obsMetaDataforCat, size=cls.size) # Create a galaxy Table overlapping with the obsMetaData Spatial Bounds # using positions from the samples above and a database name given by # self.dbname vals = cls._createFakeGalaxyDB() cls.valName = os.path.join(cls.scratchDir, 'valsFromTest.dat') with open(cls.valName, 'w') as f: for i, v in enumerate(vals[0]): f.write(str(np.radians(vals[0][i])) + ' ' + str(np.radians(vals[1][i])) + '\n') # fig, ax = plt.subplots() # ax.plot(vals[0][:1000], vals[1][: 1000], '.') # ax.plot([0.13], [-1.2], 'rs', markersize=8) # fig.savefig(os.path.join(cls.scratchDir, 'match_galDBPosns.pdf')) # Read it into a CatalogDBObject galDB class MyGalaxyCatalog(CatalogDBObject): ''' Create a like CatalogDBObject connecting to a local sqlite database ''' objid = 'mytestgals' tableid = 'gals' idColKey = 'id' objectTypeId = 0 appendint = 10000 database = cls.dbname # dbAddress = './testData/galcat.db' raColName = 'raJ2000' decColName = 'decJ2000' driver = 'sqlite' # columns required to convert the ra, dec values in degrees # to radians again columns = [('id', 'id', int), ('raJ2000', 'raJ2000 * PI()/ 180. '), ('decJ2000', 'decJ2000 * PI()/ 180.'), ('redshift', 'redshift')] cls.galDB = MyGalaxyCatalog(database=cls.dbname) # Generate a set of Observation MetaData Outputs that overlap # the galaxies in space opsimPath = os.path.join(getPackageDir('sims_data'), 'OpSimData') opsimDB = os.path.join(opsimPath, 'opsimblitz1_1133_sqlite.db') generator = ObservationMetaDataGenerator(database=opsimDB) cls.obsMetaDataResults = generator.getObservationMetaData(limit=100, fieldRA=(5.0, 8.0), fieldDec=(-85., -60.), expMJD=(49300., 49400.), boundLength=0.15, boundType='circle') sncatalog = SNIaCatalog(db_obj=cls.galDB, obs_metadata=cls.obsMetaDataResults[6], column_outputs=['t0', 'flux_u', 'flux_g', 'flux_r', 'flux_i', 'flux_z', 'flux_y', 'mag_u', 'mag_g', 'mag_r', 'mag_i', 'mag_z', 'mag_y', 'adu_u', 'adu_g', 'adu_r', 'adu_i', 'adu_z', 'adu_y', 'mwebv']) sncatalog.suppressDimSN = True sncatalog.midSurveyTime = sncatalog.mjdobs - 20. sncatalog.snFrequency = 1.0 cls.fullCatalog = os.path.join(cls.scratchDir, 'testSNCatalogTest.dat') sncatalog.write_catalog(cls.fullCatalog) # Create a SNCatalog based on GalDB, and having times of explosions # overlapping the times in obsMetaData cls.fnameList = cls._writeManySNCatalogs(cls.obsMetaDataResults)
t = time.time() # Get opsim data. opsdb = '/Users/lynnej/opsim/db/minion_1016_newsky.db' generator = ObservationMetaDataGenerator(database=opsdb, driver='sqlite') night = 203 query = 'select min(expMJD), max(expMJD) from summary where night=%d and filter="r"' % ( night) res = generator.opsimdb.execute_arbitrary(query) expMJD_min = res[0][0] expMJD_max = res[0][1] # Test image (deep, r band, near ecliptic) obsMetaDataResults = generator.getObservationMetaData(expMJD=(expMJD_min, expMJD_max), boundLength=2.2) dt, t = dtime(t) print('To query opsim database: %f seconds' % (dt)) write_header = True write_mode = 'w' #ssmObj = NEOObj() ssmObj = SolarSystemObj() for obs in obsMetaDataResults: #print obs.mjd, obs.unrefractedRA, obs.unrefractedDec, obs.bandpass, obs.boundType, obs.boundLength mySsmDb = ssmCatCamera(ssmObj, obs_metadata=obs)
t = time.time() # Get opsim data. opsdb = '/Users/lynnej/opsim/db/enigma_1189_sqlite.db' generator = ObservationMetaDataGenerator(database=opsdb, driver='sqlite') night = 747 query = 'select min(expMJD), max(expMJD) from summary where night=%d' %(night) res = generator.opsimdb.execute_arbitrary(query) expMJD_min = res[0][0] expMJD_max = res[0][1] #obsMetaDataResults = generator.getObservationMetaData(expMJD=(expMJD_min, expMJD_max), boundLength=2.2) # Test image (deep, r band, near ecliptic) obsMetaDataResults = generator.getObservationMetaData(expMJD=50491.36028, boundLength=2.2) dt, t = dtime(t) print 'To query opsim database: %f seconds' %(dt) write_header = True write_mode = 'w' #ssmObj = NEOObj() ssmObj = SolarSystemObj() for obs in obsMetaDataResults: #print obs.mjd, obs.unrefractedRA, obs.unrefractedDec, obs.bandpass, obs.boundType, obs.boundLength mySsmDb = ssmCatCamera(ssmObj, obs_metadata = obs) #mySsmDb = ssmCat(ssmObj, obs_metadata = obs)
class ObservationMetaDataGeneratorTest(unittest.TestCase): longMessage = True @classmethod def tearDownClass(cls): sims_clean_up() def setUp(self): dbPath = os.path.join(getPackageDir('sims_data'), 'OpSimData/opsimblitz1_1133_sqlite.db') self.gen = ObservationMetaDataGenerator(database=dbPath, driver='sqlite') def tearDown(self): del self.gen def testExceptions(self): """ Make sure that RuntimeErrors get raised when they should """ gen = self.gen self.assertRaises(RuntimeError, gen.getObservationMetaData) self.assertRaises(RuntimeError, gen.getObservationMetaData, fieldRA=(1.0, 2.0, 3.0)) def testQueryOnRanges(self): """ Test that ObservationMetaData objects returned by queries of the form min < value < max are, in fact, within that range. Test when querying on both a single and two columns. """ gen = self.gen # An list containing the bounds of our queries. # The order of the tuples must correspond to the order of # self.columnMapping in ObservationMetaDataGenerator. # This was generated with a separate script which printed # the median and maximum values of all of the quantities # in our test opsim database bounds = [('obsHistID', (5973, 7000)), ('fieldRA', (np.degrees(1.370916), np.degrees(1.40))), ('rawSeeing', (0.728562, 0.9)), ('seeing', (0.7, 0.9)), ('dist2Moon', (np.degrees(1.570307), np.degrees(1.9))), ('expMJD', (49367.129396, 49370.0)), ('m5', (22.815249, 23.0)), ('skyBrightness', (19.017605, 19.5))] # test querying on a single column for line in bounds: tag = line[0] args = {tag: line[1]} results = gen.getObservationMetaData(**args) msg = "failed querying on %s" % tag self.assertGreater(len(results), 0, msg=msg) for obs in results: val = get_val_from_obs(tag, obs) self.assertGreaterEqual(val, line[1][0], msg=msg) self.assertLessEqual(val, line[1][1], msg=msg) # test querying on two columns at once for ix in range(len(bounds)): tag1 = bounds[ix][0] for jx in range(ix+1, len(bounds)): tag2 = bounds[jx][0] args = {} args[tag1] = bounds[ix][1] args[tag2] = bounds[jx][1] results = gen.getObservationMetaData(**args) msg = "failed querying %s and %s" % (tag1, tag2) self.assertGreater(len(results), 0, msg=msg) for obs in results: v1 = get_val_from_obs(tag1, obs) v2 = get_val_from_obs(tag2, obs) self.assertGreaterEqual(v1, bounds[ix][1][0], msg=msg) self.assertLessEqual(v1, bounds[ix][1][1], msg=msg) self.assertGreaterEqual(v2, bounds[jx][1][0], msg=msg) self.assertLessEqual(v2, bounds[jx][1][1], msg=msg) def testOpSimQueryOnRanges(self): """ Test that getOpimRecords() returns correct results """ bounds = [('obsHistID', (5973, 7000)), ('fieldRA', (np.degrees(1.370916), np.degrees(1.40))), ('rawSeeing', (0.728562, 0.9)), ('seeing', (0.7, 0.9)), ('dist2Moon', (np.degrees(1.570307), np.degrees(1.9))), ('expMJD', (49367.129396, 49370.0)), ('m5', (22.815249, 23.0)), ('skyBrightness', (19.017605, 19.5))] for line in bounds: tag = line[0] args = {tag: line[1]} results = self.gen.getOpSimRecords(**args) msg = 'failed querying %s ' % tag self.assertGreater(len(results), 0) for rec in results: val = get_val_from_rec(tag, rec) self.assertGreaterEqual(val, line[1][0], msg=msg) self.assertLessEqual(val, line[1][1], msg=msg) for ix in range(len(bounds)): tag1 = bounds[ix][0] for jx in range(ix+1, len(bounds)): tag2 = bounds[jx][0] args = {tag1: bounds[ix][1], tag2: bounds[jx][1]} results = self.gen.getOpSimRecords(**args) msg = 'failed while querying %s and %s' % (tag1, tag2) self.assertGreater(len(results), 0) for rec in results: v1 = get_val_from_rec(tag1, rec) v2 = get_val_from_rec(tag2, rec) 
self.assertGreaterEqual(v1, bounds[ix][1][0], msg=msg) self.assertLessEqual(v1, bounds[ix][1][1], msg=msg) self.assertGreaterEqual(v2, bounds[jx][1][0], msg=msg) self.assertLessEqual(v2, bounds[jx][1][1], msg=msg) def testQueryExactValues(self): """ Test that ObservationMetaData returned by a query demanding an exact value do, in fact, adhere to that requirement. """ gen = self.gen bounds = [('obsHistID', 5973), ('expDate', 1220779), ('fieldRA', np.degrees(1.370916)), ('fieldDec', np.degrees(-0.456238)), ('moonRA', np.degrees(2.914132)), ('moonDec', np.degrees(0.06305)), ('rotSkyPos', np.degrees(3.116656)), ('telescopeFilter', 'i'), ('rawSeeing', 0.728562), ('seeing', 0.88911899999999999), ('sunAlt', np.degrees(-0.522905)), ('moonAlt', np.degrees(0.099096)), ('dist2Moon', np.degrees(1.570307)), ('moonPhase', 52.2325), ('expMJD', 49367.129396), ('visitExpTime', 30.0), ('m5', 22.815249), ('skyBrightness', 19.017605)] for ii in range(len(bounds)): tag = bounds[ii][0] args = {} args[tag] = bounds[ii][1] results = gen.getObservationMetaData(**args) msg = 'failed querying %s' % tag self.assertGreater(len(results), 0, msg=msg) for obs in results: self.assertEqual(get_val_from_obs(tag, obs), bounds[ii][1], msg=msg) def testOpSimQueryExact(self): """ Test that querying OpSim records for exact values works """ bounds = [('obsHistID', 5973), ('expDate', 1220779), ('fieldRA', np.degrees(1.370916)), ('fieldDec', np.degrees(-0.456238)), ('moonRA', np.degrees(2.914132)), ('moonDec', np.degrees(0.06305)), ('rotSkyPos', np.degrees(3.116656)), ('telescopeFilter', 'i'), ('rawSeeing', 0.728562), ('seeing', 0.88911899999999999), ('sunAlt', np.degrees(-0.522905)), ('moonAlt', np.degrees(0.099096)), ('dist2Moon', np.degrees(1.570307)), ('moonPhase', 52.2325), ('expMJD', 49367.129396), ('visitExpTime', 30.0), ('m5', 22.815249), ('skyBrightness', 19.017605)] for line in bounds: tag = line[0] args = {tag: line[1]} results = self.gen.getOpSimRecords(**args) msg = 'failed while querying %s' % tag self.assertGreater(len(results), 0, msg=msg) for rec in results: self.assertEqual(get_val_from_rec(tag, rec), line[1], msg=msg) def testPassInOtherQuery(self): """ Test that you can pass OpSim pointings generated from another source into an ObservationMetaDataGenerator and still get ObservationMetaData out """ pointing_list = self.gen.getOpSimRecords(fieldRA=np.degrees(1.370916)) self.assertGreater(len(pointing_list), 1) local_gen = ObservationMetaDataGenerator() obs_list = local_gen.ObservationMetaDataFromPointingArray(pointing_list) self.assertEqual(len(obs_list), len(pointing_list)) for pp in pointing_list: obs = local_gen.ObservationMetaDataFromPointing(pp) self.assertIsInstance(obs, ObservationMetaData) def testQueryLimit(self): """ Test that, when we specify a limit on the number of ObservationMetaData we want returned, that limit is respected """ gen = self.gen results = gen.getObservationMetaData(fieldRA=(np.degrees(1.370916), np.degrees(1.5348635)), limit=20) self.assertEqual(len(results), 20) def testQueryOnFilter(self): """ Test that queries on the filter work. """ gen = self.gen results = gen.getObservationMetaData(fieldRA=np.degrees(1.370916), telescopeFilter='i') ct = 0 for obs_metadata in results: self.assertAlmostEqual(obs_metadata._pointingRA, 1.370916) self.assertEqual(obs_metadata.bandpass, 'i') ct += 1 # Make sure that more than zero ObservationMetaData were returned self.assertGreater(ct, 0) def testObsMetaDataBounds(self): """ Make sure that the bound specifications (i.e. 
a circle or a box on the sky) are correctly passed through to the resulting ObservationMetaData """ gen = self.gen # Test a cirlce with a specified radius results = gen.getObservationMetaData(fieldRA=np.degrees(1.370916), telescopeFilter='i', boundLength=0.9) ct = 0 for obs_metadata in results: self.assertTrue(isinstance(obs_metadata.bounds, CircleBounds), msg='obs_metadata.bounds is not an intance of ' 'CircleBounds') # include some wiggle room, in case ObservationMetaData needs to # adjust the boundLength to accommodate the transformation between # ICRS and observed coordinates self.assertGreaterEqual(obs_metadata.bounds.radiusdeg, 0.9) self.assertLessEqual(obs_metadata.bounds.radiusdeg, 0.95) self.assertAlmostEqual(obs_metadata.bounds.RA, np.radians(obs_metadata.pointingRA), 5) self.assertAlmostEqual(obs_metadata.bounds.DEC, np.radians(obs_metadata.pointingDec), 5) ct += 1 # Make sure that some ObservationMetaData were tested self.assertGreater(ct, 0) boundLengthList = [1.2, (1.2, 0.6)] for boundLength in boundLengthList: results = gen.getObservationMetaData(fieldRA=np.degrees(1.370916), telescopeFilter='i', boundType='box', boundLength=boundLength) if hasattr(boundLength, '__len__'): dra = boundLength[0] ddec = boundLength[1] else: dra = boundLength ddec = boundLength ct = 0 for obs_metadata in results: RAdeg = obs_metadata.pointingRA DECdeg = obs_metadata.pointingDec self.assertTrue(isinstance(obs_metadata.bounds, BoxBounds), msg='obs_metadata.bounds is not an instance of ' 'BoxBounds') self.assertAlmostEqual(obs_metadata.bounds.RAminDeg, RAdeg-dra, 10) self.assertAlmostEqual(obs_metadata.bounds.RAmaxDeg, RAdeg+dra, 10) self.assertAlmostEqual(obs_metadata.bounds.DECminDeg, DECdeg-ddec, 10) self.assertAlmostEqual(obs_metadata.bounds.DECmaxDeg, DECdeg+ddec, 10) self.assertAlmostEqual(obs_metadata.bounds.RA, np.radians(obs_metadata.pointingRA), 5) self.assertAlmostEqual(obs_metadata.bounds.DEC, np.radians(obs_metadata.pointingDec), 5) ct += 1 # Make sure that some ObservationMetaData were tested self.assertGreater(ct, 0) def testQueryOnNight(self): """ Check that the ObservationMetaDataGenerator can query on the 'night' column in the OpSim summary table """ # the test database goes from night=0 to night=28 # corresponding to 49353.032079 <= mjd <= 49381.38533 night0 = 49353.032079 results = self.gen.getObservationMetaData(night=(11, 13)) self.assertGreater(len(results), 1800) # there should be about 700 observations a night; # make sure we get at least 600 for obs in results: self.assertGreaterEqual(obs.mjd.TAI, night0+11.0) self.assertLessEqual(obs.mjd.TAI, night0+13.5) # the 0.5 is there because the last observation on night 13 could be # 13 days and 8 hours after the first observation on night 0 self.assertGreaterEqual(obs._OpsimMetaData['night'], 11) self.assertLessEqual(obs._OpsimMetaData['night'], 13) # query for an exact night results = self.gen.getObservationMetaData(night=15) self.assertGreater(len(results), 600) # there should be about 700 observations a night; # make sure we get at least 600 for obs in results: self.assertEqual(obs._OpsimMetaData['night'], 15) self.assertGreaterEqual(obs.mjd.TAI, night0+14.9) self.assertLessEqual(obs.mjd.TAI, night0+15.9) def testCreationOfPhoSimCatalog(self): """ Make sure that we can create PhoSim input catalogs using the returned ObservationMetaData. This test will just make sure that all of the expected header entries are there. 
""" dbName = tempfile.mktemp(dir=ROOT, prefix='obsMetaDataGeneratorTest-', suffix='.db') makePhoSimTestDB(filename=dbName) bulgeDB = testGalaxyBulgeDBObj(driver='sqlite', database=dbName) gen = self.gen results = gen.getObservationMetaData(fieldRA=np.degrees(1.370916), telescopeFilter='i') testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0]) testCat.phoSimHeaderMap = {} with lsst.utils.tests.getTempFilePath('.txt') as catName: testCat.write_catalog(catName) if os.path.exists(dbName): os.unlink(dbName) def testCreationOfPhoSimCatalog_2(self): """ Make sure that we can create PhoSim input catalogs using the returned ObservationMetaData. Use the actual DefaultPhoSimHeader map; make sure that opsim_version does not make it into the header. """ dbName = tempfile.mktemp(dir=ROOT, prefix='obsMetaDataGeneratorTest-', suffix='.db') makePhoSimTestDB(filename=dbName) bulgeDB = testGalaxyBulgeDBObj(driver='sqlite', database=dbName) gen = self.gen results = gen.getObservationMetaData(fieldRA=np.degrees(1.370916), telescopeFilter='i') testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0]) testCat.phoSimHeaderMap = DefaultPhoSimHeaderMap with lsst.utils.tests.getTempFilePath('.txt') as catName: testCat.write_catalog(catName) ct_lines = 0 with open(catName, 'r') as in_file: for line in in_file: ct_lines += 1 self.assertNotIn('opsim_version', line) self.assertGreater(ct_lines, 10) # check that some lines did get written if os.path.exists(dbName): os.unlink(dbName) def testCreationOfPhoSimCatalog_3(self): """ Make sure that we can create PhoSim input catalogs using the returned ObservationMetaData. Test that an error is actually raised if we try to build a PhoSim catalog with a v3 header map using a v4 ObservationMetaData """ dbName = tempfile.mktemp(dir=ROOT, prefix='obsMetaDataGeneratorTest-', suffix='.db') makePhoSimTestDB(filename=dbName) bulgeDB = testGalaxyBulgeDBObj(driver='sqlite', database=dbName) opsim_db = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'astro-lsst-01_2014.db') assert os.path.isfile(opsim_db) gen = ObservationMetaDataGenerator(opsim_db, driver='sqlite') results = gen.getObservationMetaData(fieldRA=(70.0, 85.0), telescopeFilter='i') self.assertGreater(len(results), 0) testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0]) testCat.phoSimHeaderMap = DefaultPhoSimHeaderMap with lsst.utils.tests.getTempFilePath('.txt') as catName: with self.assertRaises(RuntimeError): testCat.write_catalog(catName) if os.path.exists(dbName): os.unlink(dbName)
def test_multiband_light_curves(self): """ Check that multi-band light curves are returned correctly. """ raRange = (78.0, 82.0) decRange = (-69.0, -65.0) bandpass = ('r', 'g') gen = AgnLightCurveGenerator(self.agn_db, self.opsimDb) pointings = gen.get_pointings(raRange, decRange, bandpass=bandpass) for row in pointings: for obs in row: mjd = ModifiedJulianDate(TAI=obs.mjd.TAI - 49000.0 + 59580.0) obs.mjd = mjd lc_dict, truth_info = gen.light_curves_from_pointings(pointings) self.assertGreater(len(lc_dict), 2) obs_gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite') control_pointings_r = obs_gen.getObservationMetaData( fieldRA=raRange, fieldDec=decRange, telescopeFilter='r', boundLength=1.75) for obs in control_pointings_r: mjd = ModifiedJulianDate(TAI=obs.mjd.TAI - 49000.0 + 59580.0) obs.mjd = mjd control_pointings_g = obs_gen.getObservationMetaData( fieldRA=raRange, fieldDec=decRange, telescopeFilter='g', boundLength=1.75) for obs in control_pointings_g: mjd = ModifiedJulianDate(TAI=obs.mjd.TAI - 49000.0 + 59580.0) obs.mjd = mjd self.assertGreater(len(control_pointings_g), 0) self.assertGreater(len(control_pointings_r), 0) ct = 0 for obs in control_pointings_r: cat = agnControlCatalog(self.agn_db, obs_metadata=obs) for star_obj in cat.iter_catalog(): ct += 1 lc = lc_dict[star_obj[0]]['r'] dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex] - star_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex] - star_obj[4]), 1.0e-7) for obs in control_pointings_g: cat = agnControlCatalog(self.agn_db, obs_metadata=obs) for star_obj in cat.iter_catalog(): ct += 1 lc = lc_dict[star_obj[0]]['g'] dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex] - star_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex] - star_obj[4]), 1.0e-7) # Verify that the catalogs and LightCurveGenerator returned the # same number of observations total_ct = 0 for obj_name in lc_dict: for band in lc_dict[obj_name]: total_ct += len(lc_dict[obj_name][band]['mjd']) self.assertEqual(ct, total_ct)
def setUpClass(cls): # Set directory where scratch work will be done cls.madeScratchDir = False cls.scratchDir = 'scratchSpace' # Setup a directory in which test data will be made if not os.path.exists(cls.scratchDir): os.makedirs(cls.scratchDir) cls.madeScratchDir = True # ObsMetaData instance with spatial window within which we will # put galaxies in a fake galaxy catalog cls.obsMetaDataforCat = ObservationMetaData(boundType='circle', boundLength=np.degrees(0.25), pointingRA=np.degrees(0.13), pointingDec=np.degrees(-1.2), bandpassName=['r'], mjd=49350.) # Randomly generate self.size Galaxy positions within the spatial window # of obsMetaDataforCat cls.dbname = os.path.join(cls.scratchDir, 'galcat.db') cls.size = 1000 cls.GalaxyPositionSamps = sample_obsmetadata( obsmetadata=cls.obsMetaDataforCat, size=cls.size) # Create a galaxy Table overlapping with the obsMetaData Spatial Bounds # using positions from the samples above and a database name given by # self.dbname vals = cls._createFakeGalaxyDB() with open('valsFromTest.dat', 'w') as f: for i, v in enumerate(vals[0]): f.write(str(np.radians(vals[0][i])) + ' ' + str(np.radians(vals[1][i])) + '\n') # fig, ax = plt.subplots() # ax.plot(vals[0][:1000], vals[1][: 1000], '.') # ax.plot([0.13], [-1.2], 'rs', markersize=8) # fig.savefig(os.path.join(cls.scratchDir, 'match_galDBPosns.pdf')) # Read it into a CatalogDBObject galDB class MyGalaxyCatalog(CatalogDBObject): ''' Create a like CatalogDBObject connecting to a local sqlite database ''' objid = 'mytestgals' tableid = 'gals' idColKey = 'id' objectTypeId = 0 appendint = 10000 database = cls.dbname # dbAddress = './testData/galcat.db' raColName = 'raJ2000' decColName = 'decJ2000' driver = 'sqlite' # columns required to convert the ra, dec values in degrees # to radians again columns = [('id', 'id', int), ('raJ2000','raJ2000 * PI()/ 180. '), ('decJ2000','decJ2000 * PI()/ 180.'), ('redshift', 'redshift')] # class galCopy(InstanceCatalog): # column_outputs = ['id', 'raJ2000', 'decJ2000', 'redshift'] # override_formats = {'raJ2000': '%8e', 'decJ2000': '%8e'} cls.galDB = MyGalaxyCatalog(database=cls.dbname) # cls.galphot = galCopy(db_obj=cls.galDB, # obs_metadata=cls.obsMetaDataforCat) # cls.galPhotFname = os.path.join(cls.scratchDir, 'gals.dat') # cls.galphot.write_catalog(cls.galPhotFname) # Generate a set of Observation MetaData Outputs that overlap # the galaxies in space opsimPath = os.path.join(eups.productDir('sims_data'),'OpSimData') opsimDB = os.path.join(opsimPath,'opsimblitz1_1133_sqlite.db') generator = ObservationMetaDataGenerator() cls.obsMetaDataResults = generator.getObservationMetaData(limit=100, fieldRA=(5.0, 8.0), fieldDec=(-85.,-60.), expMJD=(49300., 49400.), boundLength=0.15, boundType='circle') # cls.obsMetaDataResults has obsMetaData corresponding to 15 pointings # This is tested in test_obsMetaDataGeneration # v = zip(*map(cls.coords, cls.obsMetaDataResults)) # fig2, ax2 = plt.subplots() # ax2.plot(vals[0][:1000], vals[1][: 1000], '.') # ax2.plot(v[0], v[1], 'ko', markersize=8) # ax2.axhline(-np.pi, color='k', lw=2) # ax2.axhline(np.pi, color='k', lw=2) # ax2.axvline(0., color='k', lw=2.) # ax2.axvline(2. * np.pi, color='k', lw=2.) 
# fig2.savefig(os.path.join(cls.scratchDir, 'matchPointings.pdf')) #print 'cls.obsMetaDataforCat' #print cls.obsMetaDataforCat.summary #print 'obsMetaDataResults' # obsMetaDataList = [] # for obsMetaData in tmpobsMetaDataResults: # obsMetaDataList.append(ObservationMetaData(boundType='circle', # boundLength=np.degrees(0.05), # unrefractedRA=np.degrees(0.13), # unrefractedDec=np.degrees(-1.2), # bandpassName=obsMetaData.bandpass, # mjd=obsMetaData.mjd)) # cls.obsMetaDataResults = tmpobsMetaDataResults# pobsMetaDataList # self.catalogList = self._writeManySNCatalogs() sncatalog = SNIaCatalog(db_obj=cls.galDB, obs_metadata=cls.obsMetaDataResults[12], column_outputs=['t0', 'flux_u', 'flux_g', \ 'flux_r', 'flux_i', 'flux_z',\ 'flux_y', 'mag_u', 'mag_g',\ 'mag_r', 'mag_i', 'mag_z', \ 'mag_y', 'adu_u', 'adu_g',\ 'adu_r', 'adu_i', 'adu_z', \ 'adu_y','mwebv']) sncatalog.suppressDimSN = True sncatalog.midSurveyTime = sncatalog.mjdobs - 20. sncatalog.snFrequency = 1.0 cls.fullCatalog = cls.scratchDir + '/testSNCatalogTest.dat' sncatalog.write_catalog(cls.fullCatalog) # Create a SNCatalog based on GalDB, and having times of explosions # overlapping the times in obsMetaData cls.fnameList = cls._writeManySNCatalogs(cls.obsMetaDataResults)
def testQueryOnRanges(self): """ Test that ObservationMetaData objects returned by queries of the form min < value < max are, in fact, within that range. Test when querying on both a single and two columns. """ gen = ObservationMetaDataGenerator() #An list containing the bounds of our queries. #The order of the tuples must correspond to the order of #self.columnMapping in ObservationMetaDataGenerator. #This was generated with a separate script which printed #the median and maximum values of all of the quantities #in our test opsim database bounds = [ ('obsHistID',(5973, 11080)), ('expDate',(1220779, 1831593)), ('fieldRA',(numpy.degrees(1.370916), numpy.degrees(1.5348635))), ('fieldDec',(numpy.degrees(-0.456238), numpy.degrees(0.0597905))), ('moonRA',(numpy.degrees(2.914132), numpy.degrees(4.5716525))), ('moonDec',(numpy.degrees(0.06305), numpy.degrees(0.2216745))), ('rotSkyPos',(numpy.degrees(3.116656), numpy.degrees(4.6974265))), ('telescopeFilter',('i','i')), ('rawSeeing',(0.728562, 1.040495)), ('seeing', (0.7, 0.9)), ('sunAlt',(numpy.degrees(-0.522905), numpy.degrees(-0.366073))), ('moonAlt',(numpy.degrees(0.099096), numpy.degrees(0.5495415))), ('dist2Moon',(numpy.degrees(1.570307), numpy.degrees(2.347868))), ('moonPhase',(52.2325, 76.0149785)), ('expMJD',(49367.129396, 49374.1990025)), ('altitude',(numpy.degrees(0.781015), numpy.degrees(1.1433785))), ('azimuth',(numpy.degrees(3.470077), numpy.degrees(4.8765995))), ('visitExpTime',(30.0,30.0)), ('airmass',(1.420459, 2.0048075)), ('m5',(22.815249, 24.0047695)), ('skyBrightness',(19.017605, 20.512553))] #test querying on a single column for (ii,line) in enumerate(bounds): tag = line[0] if tag != 'telescopeFilter' and tag != 'visitExpTime': args = {} args[tag] = line[1] results = gen.getObservationMetaData(**args) if tag == 'skyBrightness': ct = 0 for obs_metadata in results: self.assertTrue(obs_metadata.skyBrightness<line[1][1]) self.assertTrue(obs_metadata.skyBrightness>line[1][0]) ct += 1 self.assertTrue(ct>0) elif tag == 'm5': ct = 0 for obs_metadata in results: self.assertTrue(obs_metadata.m5[obs_metadata.bandpass]<line[1][1]) self.assertTrue(obs_metadata.m5[obs_metadata.bandpass]>line[1][0]) ct += 1 self.assertTrue(ct>0) name = gen.columnMapping[ii][2] if name is not None: if gen.columnMapping[ii][4] is not None: xmin = gen.columnMapping[ii][4](line[1][0]) xmax = gen.columnMapping[ii][4](line[1][1]) else: xmin = line[1][0] xmax = line[1][1] ct = 0 for obs_metadata in results: ct += 1 self.assertTrue(obs_metadata.phoSimMetaData[name][0]<xmax) self.assertTrue(obs_metadata.phoSimMetaData[name][0]>xmin) #make sure that we did not accidentally choose values such that #no ObservationMetaData were ever returned self.assertTrue(ct>0) #test querying on two columns at once ct = 0 for ii in range(len(bounds)): tag1 = bounds[ii][0] if tag1 != 'telescopeFilter' and tag1 != 'visitExpTime': name1 = gen.columnMapping[ii][2] if gen.columnMapping[ii][4] is not None: xmin = gen.columnMapping[ii][4](bounds[ii][1][0]) xmax = gen.columnMapping[ii][4](bounds[ii][1][1]) else: xmin = bounds[ii][1][0] xmax = bounds[ii][1][1] for jj in range(ii+1, len(bounds)): tag2 = bounds[jj][0] if tag2 != 'telescopeFilter' and tag2 != 'visitExpTime': name2 = gen.columnMapping[jj][2] if gen.columnMapping[jj][4] is not None: ymin = gen.columnMapping[jj][4](bounds[jj][1][0]) ymax = gen.columnMapping[jj][4](bounds[jj][1][1]) else: ymin = bounds[jj][1][0] ymax = bounds[jj][1][1] args = {} args[tag1] = bounds[ii][1] args[tag2] = bounds[jj][1] results = 
gen.getObservationMetaData(**args) if name1 is not None or name2 is not None: for obs_metadata in results: ct += 1 if name1 is not None: self.assertTrue(obs_metadata.phoSimMetaData[name1][0]>xmin) self.assertTrue(obs_metadata.phoSimMetaData[name1][0]<xmax) if name2 is not None: self.assertTrue(obs_metadata.phoSimMetaData[name2][0]>ymin) self.assertTrue(obs_metadata.phoSimMetaData[name2][0]<ymax) #Make sure that we didn't choose values such that no ObservationMetaData were #ever returned self.assertTrue(ct>0)
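Stripped of the bookkeeping, the single-column case being exercised above reduces to the sketch below. The 'airmass' key into phoSimMetaData is an assumption made for illustration; the real test looks the column name up through gen.columnMapping rather than hard-coding it.

gen = ObservationMetaDataGenerator()
results = gen.getObservationMetaData(airmass=(1.0, 1.5))
for obs_metadata in results:
    # each returned pointing should fall strictly inside the requested range;
    # 'airmass' is assumed here to be the phoSimMetaData column name
    value = obs_metadata.phoSimMetaData['airmass'][0]
    assert 1.0 < value < 1.5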
def test_ssm_catalog_creation(self): t = time.time() # Fake opsim data. database = os.path.join(getPackageDir('SIMS_DATA'), 'OpSimData/opsimblitz1_1133_sqlite.db') generator = ObservationMetaDataGenerator(database=database, driver='sqlite') night = 20 query = 'select min(expMJD), max(expMJD) from summary where night=%d' % ( night) res = generator.opsimdb.execute_arbitrary(query) expMJD_min = res[0][0] expMJD_max = res[0][1] obsMetaDataResults = generator.getObservationMetaData( expMJD=(expMJD_min, expMJD_max), limit=3, boundLength=2.2) dt, t = dtime(t) print('To query opsim database: %f seconds' % (dt)) write_header = True write_mode = 'w' try: ssmObj = SolarSystemObj() for obsMeta in obsMetaDataResults: # But moving objects databases are not currently complete for all years. # Push forward to night=747. # (note that we need the phosim dictionary as well) newMJD = 59590.2 # this MJD is artificially chosen to be in the # time span of the new baseline simulated survey obs = ObservationMetaData( mjd=newMJD, pointingRA=obsMeta.pointingRA, pointingDec=obsMeta.pointingDec, bandpassName=obsMeta.bandpass, rotSkyPos=obsMeta.rotSkyPos, m5=obsMeta.m5[obsMeta.bandpass], seeing=obsMeta.seeing[obsMeta.bandpass], boundLength=obsMeta.boundLength, boundType=obsMeta.boundType) obs._OpsimMetaData = {'visitExpTime': 30} mySsmDb = ssmCatCamera(ssmObj, obs_metadata=obs) photParams = PhotometricParameters( exptime=obs.OpsimMetaData['visitExpTime'], nexp=1, bandpass=obs.bandpass) mySsmDb.photParams = photParams try: with lsst.utils.tests.getTempFilePath( '.txt') as output_cat: mySsmDb.write_catalog(output_cat, write_header=write_header, write_mode=write_mode) # verify that we did not write an empty catalog with open(output_cat, 'r') as input_file: lines = input_file.readlines() msg = 'MJD is %.3f' % obs.mjd.TAI self.assertGreater(len(lines), 1, msg=msg) except: # This is because the solar system object 'tables' # don't actually connect to tables on fatboy; they just # call methods stored on fatboy. Therefore, the connection # failure will not be noticed until this part of the test msg = sys.exc_info()[1].args[0] if 'DB-Lib error' in msg: reassure() continue else: raise write_mode = 'a' write_header = False dt, t = dtime(t) print( 'To query solar system objects: %f seconds (obs MJD time %f)' % (dt, obs.mjd.TAI)) except: trace = traceback.extract_tb(sys.exc_info()[2], limit=20) msg = sys.exc_info()[1].args[0] if 'Failed to connect' in msg or failedOnFatboy(trace): # if the exception was because of a failed connection # to fatboy, ignore it. reassure() pass else: raise
if args.log_file is not None: if os.path.exists(args.log_file): raise RuntimeError("%s already exists" % args.log_file) if os.path.exists(args.out_dir): if not os.path.isdir(args.out_dir): raise RuntimeError("%s is not a directory" % args.out_dir) if not os.path.exists(args.out_dir): os.mkdir(args.out_dir) t_start = time.time() # get list of ObservationMetaData to simulate obs_gen = ObservationMetaDataGenerator(args.opsim_db, driver='sqlite') obs_list = obs_gen.getObservationMetaData(night=15) del obs_gen sims_clean_up() print('%d obs' % len(obs_list)) # use AlertDataGenerator to separate the ObservationMetaData # by htmid alert_gen = AlertDataGenerator() alert_gen.subdivide_obs(obs_list, htmid_level=6) # Create a dict that maps an obsHistID to the list of htmids # of the trixels overlapping that observation # # Also separate list of obsHistIDs into n_proc
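The comment at the end of the script above describes bookkeeping that is cut off here. A sketch of the per-trixel tally, using only attributes that appear in the other alert-generation drivers in this collection (alert_gen.htmid_list and alert_gen.n_obs); the obsHistID-to-htmid dictionary itself is not reconstructed.

# count how many of the night's observations touch each trixel
n_tot_obs = 0
for htmid in alert_gen.htmid_list:
    n_here = alert_gen.n_obs(htmid)
    print('htmid %d overlaps %d observations' % (htmid, n_here))
    n_tot_obs += n_here
print('%d (observation, trixel) pairs in total' % n_tot_obs)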
camera = lsst_camera() det_name_list = [] for det in camera: if det.getType() != SCIENCE: continue det_name_list.append(det.getName()) det_name_list.sort() opsimdb = os.path.join('/Users', 'danielsf', 'physics', 'lsst_150412', 'Development', 'garage', 'OpSimData', 'minion_1016_sqlite.db') assert os.path.exists(opsimdb) obs_gen = ObservationMetaDataGenerator(database=opsimdb) obs_list = obs_gen.getObservationMetaData(obsHistID=args.obs) obs = obs_list[0] filter_name = obs.bandpass site_no_atm = Site(name="LSST", pressure=0.0, humidity=0.0) obs.site = site_no_atm assert np.abs(obs.site.pressure) < 1.0e-6 assert np.abs(obs.site.humidity) < 1.0e-6 xpix_0 = np.arange(200.0, 3800.0, 1000.0) ypix_0 = np.arange(200.0, 3800.0, 1000.0) pix_grid = np.meshgrid(xpix_0, ypix_0) cam_xpix_in = pix_grid[0].flatten() cam_ypix_in = pix_grid[1].flatten() camera_wrapper = LSSTCameraWrapper()
'lsst_r', 'lsst_i', 'starnotgal', 'isvariable'] default_columns = [('isresolved', 0, int), ('isvariable', 0, int)] default_formats = {'S': '%s', 'f': '%.8f', 'i': '%i'} transformations = {'raJ2000': numpy.degrees, 'decJ2000': numpy.degrees} if __name__ == '__main__': parser = argparse.ArgumentParser(description='Generate the reference catalog') parser.add_argument('opsimDB', help='OpSim database sqlite file') parser.add_argument('-o', '--outfile', type=str, default='twinkles_ref.txt', help='Filename of output reference catalog') args = parser.parse_args() # you need to provide ObservationMetaDataGenerator with the connection # string to an OpSim output database. This is the connection string # to a test database that comes when you install CatSim. generator = ObservationMetaDataGenerator(database=args.opsimDB, driver='sqlite') obsMetaDataResults = generator.getObservationMetaData(fieldRA=(53, 54), fieldDec=(-29, -27), boundLength=0.3) # First get the reference catalog stars = CatalogDBObject.from_objid('allstars') while True: try: ref_stars = TwinklesReference(stars, obs_metadata=obsMetaDataResults[0]) break except RuntimeError: continue ref_stars.write_catalog(args.outfile, write_mode='w', write_header=True, chunk_size=20000)
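The bare `while True ... except RuntimeError: continue` loop above retries the catalog query forever. A hedged alternative with a bounded retry count (the cap and the helper name are additions for illustration, not part of the original script):

def retry_query(builder, max_tries=5):
    # keep re-running the catalog query until it succeeds or we give up
    last_error = None
    for _ in range(max_tries):
        try:
            return builder()
        except RuntimeError as err:
            last_error = err
    raise RuntimeError('query failed after %d attempts: %s' % (max_tries, last_error))

ref_stars = retry_query(lambda: TwinklesReference(stars, obs_metadata=obsMetaDataResults[0]))
ref_stars.write_catalog(args.outfile, write_mode='w', write_header=True, chunk_size=20000)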
from lsst.sims.utils import raDecFromPupilCoords from lsst.sims.utils import angularSeparation from lsst.sims.utils import Site from lsst.sims.photUtils import BandpassDict, Sed from lsst.sims.utils import altAzPaFromRaDec from lsst.sims.utils import ObservationMetaData bp_dict, hw_dict = BandpassDict.loadBandpassesFromFiles() opsimdb = os.path.join('/Users/danielsf/physics/lsst_150412', 'Development', 'garage', 'OpSimData', 'minion_1016_sqlite.db') obs_gen = ObservationMetaDataGenerator(opsimdb) obs_list = obs_gen.getObservationMetaData(moonAlt=(-90.0, -50.0), altitude=(55.0, 57.0), fieldDec=(-10.0, 10.0)) assert len(obs_list) > 0 obs_root = obs_list[0] obs_root.site = Site(name='LSST', pressure=0.0, humidity=0.0) phosim_header = DefaultPhoSimHeaderMap phosim_header['nsnap'] = 1 phosim_header['vistime'] = 30.0 galaxy_dir = os.path.join(getPackageDir('sims_sed_library'), 'galaxySED') galaxy_sed_list = os.listdir(galaxy_dir)
def setUpClass(cls): print('setting up %s' % sims_clean_up.targets) # These represent the dimmest magnitudes at which objects # are considered visible in each of the LSST filters # (taken from Table 2 of the overview paper) cls.obs_mag_cutoff = (23.68, 24.89, 24.43, 24.0, 24.45, 22.60) cls.opsim_db = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'opsimblitz1_1133_sqlite.db') rng = np.random.RandomState(8123) obs_gen = ObservationMetaDataGenerator(database=cls.opsim_db) cls.obs_list = obs_gen.getObservationMetaData(night=(0, 2)) cls.obs_list = rng.choice(cls.obs_list, 10, replace=False) fieldid_list = [] for obs in cls.obs_list: fieldid_list.append(obs.OpsimMetaData['fieldID']) # make sure we have selected observations such that the # same field is revisited more than once assert len(np.unique(fieldid_list)) < len(fieldid_list) cls.input_dir = tempfile.mkdtemp(prefix='alertDataGen', dir=ROOT) cls.star_db_name = tempfile.mktemp(prefix='alertDataGen_star_db', dir=cls.input_dir, suffix='.db') conn = sqlite3.connect(cls.star_db_name) cursor = conn.cursor() cursor.execute('''CREATE TABLE stars (simobjid int, htmid int, ra real, dec real, umag real, gmag real, rmag real, imag real, zmag real, ymag real, px real, pmra real, pmdec real, vrad real, varParamStr text)''') conn.commit() n_stars = 10 cls.ra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.dec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) u_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) g_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) r_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) i_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) z_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) y_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.px_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.pmra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.pmdec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.vrad_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.amp_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.period_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) id_offset = -n_stars for obs in cls.obs_list: id_offset += n_stars ra_0 = obs.pointingRA dec_0 = obs.pointingDec rr = rng.random_sample(n_stars) theta = rng.random_sample(n_stars)*2.0*np.pi ra = ra_0 + rr*np.cos(theta) dec = dec_0 + rr*np.sin(theta) var_period = rng.random_sample(n_stars)*0.25 var_amp = rng.random_sample(n_stars)*1.0 + 0.01 subset = rng.randint(0, high=len(var_amp)-1, size=3) var_amp[subset[:2]] = 0.0 var_amp[subset[-1]] = -1.0 umag = rng.random_sample(n_stars)*5.0 + 15.0 gmag = rng.random_sample(n_stars)*5.0 + 15.0 rmag = rng.random_sample(n_stars)*5.0 + 15.0 imag = rng.random_sample(n_stars)*5.0 + 15.0 zmag = rng.random_sample(n_stars)*5.0 + 15.0 ymag = rng.random_sample(n_stars)*5.0 + 15.0 px = rng.random_sample(n_stars)*0.1 # say it is arcsec pmra = rng.random_sample(n_stars)*50.0+100.0 # say it is arcsec/yr pmdec = rng.random_sample(n_stars)*50.0+100.0 # say it is arcsec/yr vrad = rng.random_sample(n_stars)*600.0 - 300.0 subset = rng.randint(0, high=n_stars-1, size=3) umag[subset] = 40.0 gmag[subset] = 40.0 rmag[subset] = 40.0 imag[subset] = 40.0 zmag[subset] = 40.0 ymag[subset] = 40.0 cls.ra_truth[id_offset:id_offset+n_stars] = np.round(ra, decimals=6) cls.dec_truth[id_offset:id_offset+n_stars] = np.round(dec, decimals=6) u_truth[id_offset:id_offset+n_stars] = np.round(umag, decimals=4) 
g_truth[id_offset:id_offset+n_stars] = np.round(gmag, decimals=4) r_truth[id_offset:id_offset+n_stars] = np.round(rmag, decimals=4) i_truth[id_offset:id_offset+n_stars] = np.round(imag, decimals=4) z_truth[id_offset:id_offset+n_stars] = np.round(zmag, decimals=4) y_truth[id_offset:id_offset+n_stars] = np.round(ymag, decimals=4) cls.px_truth[id_offset:id_offset+n_stars] = np.round(px, decimals=4) cls.pmra_truth[id_offset:id_offset+n_stars] = np.round(pmra, decimals=4) cls.pmdec_truth[id_offset:id_offset+n_stars] = np.round(pmdec, decimals=4) cls.vrad_truth[id_offset:id_offset+n_stars] = np.round(vrad, decimals=4) cls.amp_truth[id_offset:id_offset+n_stars] = np.round(var_amp, decimals=4) cls.period_truth[id_offset:id_offset+n_stars] = np.round(var_period, decimals=4) cls.max_str_len = -1 for i_star in range(n_stars): if var_amp[i_star] >= -0.1: varParamStr = ('{"m":"alert_test", "p":{"amp":%.4f, "per": %.4f}}' % (var_amp[i_star], var_period[i_star])) else: varParamStr = 'None' if len(varParamStr) > cls.max_str_len: cls.max_str_len = len(varParamStr) htmid = findHtmid(ra[i_star], dec[i_star], 21) query = ('''INSERT INTO stars VALUES(%d, %d, %.6f, %.6f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, '%s')''' % (i_star+id_offset+1, htmid, ra[i_star], dec[i_star], umag[i_star], gmag[i_star], rmag[i_star], imag[i_star], zmag[i_star], ymag[i_star], px[i_star], pmra[i_star], pmdec[i_star], vrad[i_star], varParamStr)) cursor.execute(query) conn.commit() conn.close() cls.output_dir = tempfile.mkdtemp(dir=ROOT, prefix='alert_gen_output') cls.mag0_truth_dict = {} cls.mag0_truth_dict[0] = u_truth cls.mag0_truth_dict[1] = g_truth cls.mag0_truth_dict[2] = r_truth cls.mag0_truth_dict[3] = i_truth cls.mag0_truth_dict[4] = z_truth cls.mag0_truth_dict[5] = y_truth
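The INSERT in the setup code above is assembled by string interpolation. A parameterized equivalent (sketch only; same fifteen columns as the CREATE TABLE statement) is less error-prone around the quoted varParamStr:

# cast numpy scalars to plain Python types so sqlite3 can bind them
cursor.execute('INSERT INTO stars VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
               (int(i_star + id_offset + 1), int(htmid),
                float(ra[i_star]), float(dec[i_star]),
                float(umag[i_star]), float(gmag[i_star]), float(rmag[i_star]),
                float(imag[i_star]), float(zmag[i_star]), float(ymag[i_star]),
                float(px[i_star]), float(pmra[i_star]), float(pmdec[i_star]),
                float(vrad[i_star]), varParamStr))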
def test_date_range(self): """ Run test_stellar_light_curves, this time specifying a range in MJD. """ raRange = (0.0, 110.0) decRange = (-90.0, -50.0) bandpass = '******' mjdRange = (49356.0, 49357.0) lc_gen = StellarLightCurveGenerator(self.stellar_db, self.opsimDb) pointings = lc_gen.get_pointings(raRange, decRange, bandpass=bandpass, expMJD=mjdRange) test_light_curves, truth_info = lc_gen.light_curves_from_pointings( pointings) self.assertGreater(len(test_light_curves), 2) for unique_id in test_light_curves: # verify that the sources returned all do vary by making sure that the # np.diff run on the magnitudes returns something non-zero self.assertGreater( np.abs(np.diff( test_light_curves[unique_id][bandpass]['mag'])).max(), 0.0) self.assertGreater( len(test_light_curves[unique_id][bandpass]['mjd']), 0) self.assertGreater( test_light_curves[unique_id][bandpass]['mjd'].min(), mjdRange[0] - 1.0e-12) self.assertLess( test_light_curves[unique_id][bandpass]['mjd'].max(), mjdRange[1] + 1.0e-12) # Now test that specifying a small chunk_size does not change the output # light curves chunk_light_curves, truth_info = lc_gen.light_curves_from_pointings( pointings, chunk_size=1) self.assertGreater(len(chunk_light_curves), 2) for unique_id in test_light_curves: self.assertEqual( len(test_light_curves[unique_id][bandpass]['mjd']), len(chunk_light_curves[unique_id][bandpass]['mjd'])) np.testing.assert_array_equal( test_light_curves[unique_id][bandpass]['mjd'], chunk_light_curves[unique_id][bandpass]['mjd']) np.testing.assert_array_equal( test_light_curves[unique_id][bandpass]['mag'], chunk_light_curves[unique_id][bandpass]['mag']) np.testing.assert_array_equal( test_light_curves[unique_id][bandpass]['error'], chunk_light_curves[unique_id][bandpass]['error']) # Now find all of the ObservationMetaData that were included in our # light curves, generate InstanceCatalogs from them separately, # and verify that the contents of the InstanceCatalogs agree with # the contents of the light curves. gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite') obs_list = gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange, telescopeFilter=bandpass, expMJD=mjdRange, boundLength=1.75) ct = 0 for obs in obs_list: cat = stellarControlCatalog(self.stellar_db, obs_metadata=obs) for star_obj in cat.iter_catalog(): ct += 1 lc = test_light_curves[star_obj[0]][bandpass] dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI)) self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7) self.assertLess(np.abs(lc['mag'][dex] - star_obj[3]), 1.0e-7) self.assertLess(np.abs(lc['error'][dex] - star_obj[4]), 1.0e-7) # Verify that the same number of objects and observations were found in the # catalogs and the LightCurveGenerator output total_ct = 0 for obj_name in test_light_curves: for bandpass in test_light_curves[obj_name]: total_ct += len(test_light_curves[obj_name][bandpass]['mjd']) self.assertEqual(ct, total_ct)
def generatePhosimInput(mode='a', runobsHistID=None): if mode == 'a': filewrite = 'append' elif mode == 'c': filewrite = 'clobber' opsimDB = os.path.join('.','kraken_1042_sqlite.db') logfilename = 'run.log' if os.path.isfile(logfilename): if filewrite =='append': pass elif filewrite == 'clobber': with open('run.log', 'w') as f: f.write('obsHistID,status,timestamp\n') else: print('file exists and mode uncertain') exit() else: with open('run.log', 'w') as f: f.write('obsHistID,status,time\n') #you need to provide ObservationMetaDataGenerator with the connection #string to an OpSim output database. This is the connection string #to a test database that comes when you install CatSim. generator = ObservationMetaDataGenerator(database=opsimDB, driver='sqlite') obsHistIDList = numpy.genfromtxt('FirstSet_obsHistIDs.csv', delimiter=',', usecols=0) obsMetaDataResults = [] # Change the slicing in this line for the range of visits for obsHistID in obsHistIDList[600:700]: if runobsHistID is not None: obsHistID = runobsHistID obsMetaDataResults.append(generator.getObservationMetaData(obsHistID=obsHistID, fieldRA=(53, 54), fieldDec=(-29, -27), boundLength=0.3)[0]) starObjNames = ['msstars', 'bhbstars', 'wdstars', 'rrlystars', 'cepheidstars'] snmodel = SNObj() for obs_metadata in obsMetaDataResults: filename = "InstanceCatalogs / phosim_input_%s.txt" \ %(obs_metadata.phoSimMetaData['Opsim_obshistid'][0]) obs_metadata.phoSimMetaData['SIM_NSNAP'] = (1, numpy.dtype(int)) obs_metadata.phoSimMetaData['SIM_VISTIME'] = (30, numpy.dtype(float)) print('Starting Visit: ', obs_metadata.phoSimMetaData['Opsim_obshistid'][0]) compoundStarDBList = [MsStarObj, BhbStarObj, WdStarObj, RRLyStarObj, CepheidStarObj] compoundGalDBList = [GalaxyBulgeObj, GalaxyDiskObj, GalaxyAgnObj] compoundStarICList = [PhoSimCatalogPoint, PhoSimCatalogPoint, PhoSimCatalogPoint, PhoSimCatalogPoint, PhoSimCatalogPoint] compoundGalICList = [PhoSimCatalogSersic2D, PhoSimCatalogSersic2D, TwinklesCatalogZPoint] snphosim = PhoSimCatalogSN(db_obj=snmodel, obs_metadata=obs_metadata, column_outputs=['EBV']) snphosim.writeSedFile = True snphosim.suppressDimSN = True snphosim.prefix = 'spectra_files/' while True: try: starCat = CompoundInstanceCatalog(compoundStarICList, compoundStarDBList, obs_metadata=obs_metadata, constraint='gmag > 11.', compoundDBclass=sprinklerCompound) starCat.write_catalog(filename, chunk_size=10000) galCat = CompoundInstanceCatalog(compoundGalICList, compoundGalDBList, obs_metadata=obs_metadata, # constraint='g_ab > 11.', compoundDBclass=sprinklerCompound) galCat.write_catalog(filename, write_mode='a', write_header=False, chunk_size=10000) snphosim.write_catalog(filename, write_header=False, write_mode='a', chunk_size=10000) if runobsHistID is not None: print('Done doing requested obsHistID') sys.exit() with open(logfilename, 'a') as f: f.write('{0:d},DONE,{1:3.6f}\n'.format(obs_metadata.phoSimMetaData['Opsim_obshistid'][0], time.time())) break except RuntimeError: continue print("Finished Writing Visit: ", obs_metadata.phoSimMetaData['Opsim_obshistid'][0])
class InstanceCatalogMaker(object): """ Class for creating instance catalogs. Attributes ---------- gen : lsst.sims.catUtils.utils.ObservationMetaDataGenerator db_config : dict Dictionary of database connection parameters. logger : logging.logger Logger object. """ star_objs = ['msstars', 'bhbstars', 'wdstars', 'rrlystars', 'cepheidstars'] gal_objs = ['galaxyBulge', 'galaxyDisk'] def __init__(self, opsim_db, db_config=None, logger=None): """ Constructor. Parameters ---------- opsim_db : str sqlite3 db file containing observing plan. db_config : dict, optional Dictionary of database connection parameters. Parameters for connecting to fatboy.phys.washington.edu from a whitelisted machine will be used. logger : logging.logger, optional Logger object. """ self.gen = ObservationMetaDataGenerator(database=opsim_db, driver='sqlite') if db_config is not None: self.db_config = db_config else: self.db_config = dict(database='LSSTCATSIM', port=1433, host='fatboy.phys.washington.edu', driver='mssql+pymssql') if logger is None: logging.basicConfig(format="%(message)s", level=logging.INFO, stream=sys.stdout) logger = logging.getLogger() self.logger = logger def make_instance_catalog(self, obsHistID, band, boundLength, outfile=None): """ Method to create instance catalogs. Parameters ---------- obsHistID : int obsHistID for the desired visit from the opsim db file. band : str Desired LSST filter to use, ugrizy. boundLength : float Radius in degrees of sky cone in which to produce objects. outfile : str, optional File name of the instance catalog to be produced. If None, a default name will be generated, e.g., phosim_input_0000230_r_0.3deg.txt. """ if outfile is None: outfile = 'phosim_input_%07i_%s_%.1fdeg.txt' % (obsHistID, band, boundLength) obs_md = self.gen.getObservationMetaData(obsHistID=obsHistID, boundLength=boundLength)[0] do_header = True for objid in self.star_objs: self.logger.info("processing %s", objid) db_obj = CatalogDBObject.from_objid(objid, **self.db_config) phosim_object = PhoSimCatalogPoint(db_obj, obs_metadata=obs_md) if do_header: with open(outfile, 'w') as file_obj: phosim_object.write_header(file_obj) do_header = False phosim_object.write_catalog(outfile, write_mode='a', write_header=False, chunk_size=20000) for objid in self.gal_objs: self.logger.info("processing %s", objid) db_obj = CatalogDBObject.from_objid(objid, **self.db_config) phosim_object = PhoSimCatalogSersic2D(db_obj, obs_metadata=obs_md) phosim_object.write_catalog(outfile, write_mode='a', write_header=False, chunk_size=20000)
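A usage sketch for the class above; the OpSim file name and obsHistID are placeholders, and the default fatboy connection parameters are used for db_config:

maker = InstanceCatalogMaker('kraken_1042_sqlite.db')  # path is illustrative
maker.make_instance_catalog(obsHistID=230, band='r', boundLength=0.3)
# writes phosim_input_0000230_r_0.3deg.txt by default, per the docstring above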
bulge_db = GalaxyBulgeObj(connection=star_db.connection) disk_db = GalaxyDiskObj(connection=star_db.connection) agn_db = GalaxyAgnObj(connection=star_db.connection) if not os.path.exists(out_dir): os.mkdir(out_dir) phosim_header_map = copy.deepcopy(DefaultPhoSimHeaderMap) phosim_header_map['nsnap'] = 1 phosim_header_map['vistime'] = 30.0 phosim_header_map['camconfig'] = 1 for obshistid in obshistid_list: obs_list = obs_generator.getObservationMetaData(obsHistID=obshistid, boundType='circle', boundLength=args.fov) obs = obs_list[0] if dither_switch: print('dithering') obs.pointingRA = np.degrees(obs.OpsimMetaData['randomDitherFieldPerVisitRA']) obs.pointingDec = np.degrees(obs.OpsimMetaData['randomDitherFieldPerVisitDec']) rotSky = _getRotSkyPos(obs._pointingRA, obs._pointingDec, obs, obs.OpsimMetaData['ditheredRotTelPos']) obs.rotSkyPos = np.degrees(rotSky) obs.OpsimMetaData['rotTelPos'] = obs.OpsimMetaData['ditheredRotTelPos'] cat_name = os.path.join(out_dir,'phosim_cat_%d.txt' % obshistid) star_name = 'star_cat_%d.txt' % obshistid
def setUpClass(cls): print('setting up %s' % sims_clean_up.targets) cls.camera = obs_lsst_phosim.PhosimMapper().camera # These represent the dimmest magnitudes at which objects # are considered visible in each of the LSST filters # (taken from Table 2 of the overview paper) cls.obs_mag_cutoff = (23.68, 24.89, 24.43, 24.0, 24.45, 22.60) cls.opsim_db = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'opsimblitz1_1133_sqlite.db') rng = np.random.RandomState(8123) obs_gen = ObservationMetaDataGenerator(database=cls.opsim_db) cls.obs_list = obs_gen.getObservationMetaData(night=(0, 2)) cls.obs_list = rng.choice(cls.obs_list, 10, replace=False) fieldid_list = [] for obs in cls.obs_list: fieldid_list.append(obs.OpsimMetaData['fieldID']) # make sure we have selected observations such that the # same field is revisited more than once assert len(np.unique(fieldid_list)) < len(fieldid_list) cls.input_dir = tempfile.mkdtemp(prefix='alertDataGen', dir=ROOT) cls.star_db_name = tempfile.mktemp(prefix='alertDataGen_star_db', dir=cls.input_dir, suffix='.db') conn = sqlite3.connect(cls.star_db_name) cursor = conn.cursor() cursor.execute('''CREATE TABLE stars (simobjid int, htmid int, ra real, dec real, umag real, gmag real, rmag real, imag real, zmag real, ymag real, px real, pmra real, pmdec real, vrad real, varParamStr text)''') conn.commit() n_stars = 10 cls.ra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.dec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) u_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) g_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) r_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) i_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) z_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) y_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.px_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.pmra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.pmdec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.vrad_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.amp_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) cls.period_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float) id_offset = -n_stars for obs in cls.obs_list: id_offset += n_stars ra_0 = obs.pointingRA dec_0 = obs.pointingDec rr = rng.random_sample(n_stars) theta = rng.random_sample(n_stars)*2.0*np.pi ra = ra_0 + rr*np.cos(theta) dec = dec_0 + rr*np.sin(theta) var_period = rng.random_sample(n_stars)*0.25 var_amp = rng.random_sample(n_stars)*1.0 + 0.01 subset = rng.randint(0, high=len(var_amp)-1, size=3) var_amp[subset[:2]] = 0.0 var_amp[subset[-1]] = -1.0 umag = rng.random_sample(n_stars)*5.0 + 15.0 gmag = rng.random_sample(n_stars)*5.0 + 15.0 rmag = rng.random_sample(n_stars)*5.0 + 15.0 imag = rng.random_sample(n_stars)*5.0 + 15.0 zmag = rng.random_sample(n_stars)*5.0 + 15.0 ymag = rng.random_sample(n_stars)*5.0 + 15.0 px = rng.random_sample(n_stars)*0.1 # say it is arcsec pmra = rng.random_sample(n_stars)*50.0+100.0 # say it is arcsec/yr pmdec = rng.random_sample(n_stars)*50.0+100.0 # say it is arcsec/yr vrad = rng.random_sample(n_stars)*600.0 - 300.0 subset = rng.randint(0, high=n_stars-1, size=3) umag[subset] = 40.0 gmag[subset] = 40.0 rmag[subset] = 40.0 imag[subset] = 40.0 zmag[subset] = 40.0 ymag[subset] = 40.0 cls.ra_truth[id_offset:id_offset+n_stars] = np.round(ra, decimals=6) cls.dec_truth[id_offset:id_offset+n_stars] = np.round(dec, decimals=6) u_truth[id_offset:id_offset+n_stars] = 
np.round(umag, decimals=4) g_truth[id_offset:id_offset+n_stars] = np.round(gmag, decimals=4) r_truth[id_offset:id_offset+n_stars] = np.round(rmag, decimals=4) i_truth[id_offset:id_offset+n_stars] = np.round(imag, decimals=4) z_truth[id_offset:id_offset+n_stars] = np.round(zmag, decimals=4) y_truth[id_offset:id_offset+n_stars] = np.round(ymag, decimals=4) cls.px_truth[id_offset:id_offset+n_stars] = np.round(px, decimals=4) cls.pmra_truth[id_offset:id_offset+n_stars] = np.round(pmra, decimals=4) cls.pmdec_truth[id_offset:id_offset+n_stars] = np.round(pmdec, decimals=4) cls.vrad_truth[id_offset:id_offset+n_stars] = np.round(vrad, decimals=4) cls.amp_truth[id_offset:id_offset+n_stars] = np.round(var_amp, decimals=4) cls.period_truth[id_offset:id_offset+n_stars] = np.round(var_period, decimals=4) cls.max_str_len = -1 for i_star in range(n_stars): if var_amp[i_star] >= -0.1: varParamStr = ('{"m":"alert_test", "p":{"amp":%.4f, "per": %.4f}}' % (var_amp[i_star], var_period[i_star])) else: varParamStr = 'None' if len(varParamStr) > cls.max_str_len: cls.max_str_len = len(varParamStr) htmid = findHtmid(ra[i_star], dec[i_star], 21) query = ('''INSERT INTO stars VALUES(%d, %d, %.6f, %.6f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, %.4f, '%s')''' % (i_star+id_offset+1, htmid, ra[i_star], dec[i_star], umag[i_star], gmag[i_star], rmag[i_star], imag[i_star], zmag[i_star], ymag[i_star], px[i_star], pmra[i_star], pmdec[i_star], vrad[i_star], varParamStr)) cursor.execute(query) conn.commit() conn.close() cls.output_dir = tempfile.mkdtemp(dir=ROOT, prefix='alert_gen_output') cls.mag0_truth_dict = {} cls.mag0_truth_dict[0] = u_truth cls.mag0_truth_dict[1] = g_truth cls.mag0_truth_dict[2] = r_truth cls.mag0_truth_dict[3] = i_truth cls.mag0_truth_dict[4] = z_truth cls.mag0_truth_dict[5] = y_truth
class testGalSimAgn(GalSimAgn): bandpassNames = ['u', 'g'] #defined in galSimInterface/galSimUtilities.py PSF = SNRdocumentPSF() #If you want to use the LSST camera, uncomment the line below. #You can similarly assign any camera object you want here #camera = LsstSimMapper().camera #select an OpSim pointing opsimdb = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'opsimblitz1_1133_sqlite.db') obs_gen = ObservationMetaDataGenerator(database=opsimdb) obs_list = obs_gen.getObservationMetaData(obsHistID=10, boundLength=0.05) obs_metadata = obs_list[0] #grab a database of galaxies (in this case, galaxy bulges) stars = CatalogDBObject.from_objid('allstars') #now append a bunch of objects with 2D sersic profiles to our output file stars_galSim = testGalSimStars(stars, obs_metadata=obs_metadata) catName = 'galSim_compound_example.txt' stars_galSim.write_catalog(catName, chunk_size=100) print('done with stars') bulges = CatalogDBObject.from_objid('galaxyBulge') bulge_galSim = testGalSimGalaxies(bulges, obs_metadata=obs_metadata)
args = parser.parse_args() if args.out_dir is None: raise RuntimeError('must specify out_dir') if args.log_file is None: raise RuntimeError('must specify log file') if os.path.exists(args.log_file): raise RuntimeError('%s already exists' % args.log_file) if not os.path.exists(args.out_dir): os.mkdir(args.out_dir) # get the list of ObservationMetaData to simulate obs_gen = ObservationMetaDataGenerator(args.opsim_db, driver='sqlite') obs_list = obs_gen.getObservationMetaData(night=(args.night0,args.night1)) del obs_gen sims_clean_up() gc.collect() # get the list of trixel htmids to simulate alert_gen = AlertDataGenerator() alert_gen.subdivide_obs(obs_list, htmid_level=6) n_tot_obs=0 for htmid in alert_gen.htmid_list: n_tot_obs += alert_gen.n_obs(htmid) with open(args.log_file, 'a') as out_file: for htmid in alert_gen.htmid_list:
###### t = time.time() # Get opsim data. opsdb = '/Users/lynnej/opsim/db/minion_1016_newsky.db' generator = ObservationMetaDataGenerator(database=opsdb, driver='sqlite') night = 203 query = 'select min(expMJD), max(expMJD) from summary where night=%d and filter="r"' %(night) res = generator.opsimdb.execute_arbitrary(query) expMJD_min = res[0][0] expMJD_max = res[0][1] # Test image (deep, r band, near ecliptic) obsMetaDataResults = generator.getObservationMetaData(expMJD=(expMJD_min, expMJD_max), boundLength=2.2) dt, t = dtime(t) print('To query opsim database: %f seconds' %(dt)) write_header = True write_mode = 'w' #ssmObj = NEOObj() ssmObj = SolarSystemObj() for obs in obsMetaDataResults: #print obs.mjd, obs.unrefractedRA, obs.unrefractedDec, obs.bandpass, obs.boundType, obs.boundLength mySsmDb = ssmCatCamera(ssmObj, obs_metadata = obs) photParams = PhotometricParameters(exptime = obs.OpsimMetaData['visitExpTime'], nexp=1, bandpass=obs.bandpass)
import pickle import os import time from lsst.sims.catUtils.utils import ObservationMetaDataGenerator opsim_dir = '/global/projecta/projectdirs/lsst/groups/SSim/DC2' opsim_file = os.path.join(opsim_dir, 'minion_1016_desc_dithered_v4.db') assert os.path.isfile(opsim_file) out_file = os.path.join(os.environ['SCRATCH'], 'minion_1016_desc_dithered_dict.p') obs_gen = ObservationMetaDataGenerator(opsim_file) t_start = time.time() obs_md = obs_gen.getObservationMetaData(boundLength=2.1, boundType='circle', obsHistID=(-10, 1000000000)) print('getting records took %e' % (time.time() - t_start)) out_dict = {} t_start = time.time() for obs in obs_md: out_dict[obs.OpsimMetaData['obsHistID']] = obs with open(out_file, 'wb') as out_file: pickle.dump(out_dict, out_file) print('output took %e' % (time.time() - t_start))
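The pickled dictionary can then be loaded back and indexed by obsHistID; a short sketch using the same file location as the script above:

import os
import pickle

dict_file = os.path.join(os.environ['SCRATCH'], 'minion_1016_desc_dithered_dict.p')
with open(dict_file, 'rb') as in_file:
    obs_dict = pickle.load(in_file)

# pull out the ObservationMetaData for one visit and inspect its pointing
some_id = next(iter(obs_dict))
obs = obs_dict[some_id]
print(some_id, obs.pointingRA, obs.pointingDec, obs.bandpass)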
### Building Light Curves from Instance Catalogs # We would like to create a number of supernova instance catalogs and then build the light curves from the catalogs. To do this correctly, we would like to use the `observation_metadata` associated with a number of consecutive OpSim pointings. # In[34]: opsimPath = os.path.join(eups.productDir('sims_data'),'OpSimData') opsimDB = os.path.join(opsimPath,'opsimblitz1_1133_sqlite.db') # from Tucson AHM notebook from Scott # This OpSim DB is provided in sims_data. This creates a list of opsim pointings # that I have checked. This is a tuned notebook generator = ObservationMetaDataGenerator() #database = opsimPath, driver='sqlite') obsMetaDataResults = generator.getObservationMetaData(limit=100, fieldRA=(5.0, 8.0), fieldDec=(-85.,-60.), expMJD=(49300., 49400.), boundLength=0.015, boundType='circle') # In[35]: # How many pointings do we have? print(len(obsMetaDataResults)) # What are the RA, DEC values of the pointings? And how do they compare with the positions of galaxies we created in the database? # In[36]: def coords(x):