def tearDownClass(cls):
    """Clear sims caches, release the camera, and delete test artifacts."""
    sims_clean_up()
    del cls.camera
    db_file = cls.dbFileName
    if os.path.exists(db_file):
        os.unlink(db_file)
    scratch = cls.scratchDir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def test_mlt_clean_up(self):
    """
    Test that the MLT cache is correctly loaded after sims_clean_up
    is called.
    """
    # write a catalog once with a freshly-loaded MLT light-curve cache
    db = MLT_test_DB(database=self.db_name, driver='sqlite')
    obs = ObservationMetaData(mjd=60000.0)
    cat = FlaringCatalog(db, obs_metadata=obs)
    cat._mlt_lc_file = self.mlt_lc_name
    cat_name_1 = os.path.join(self.scratch_dir,'mlt_clean_test_cat_1.txt')
    cat.write_catalog(cat_name_1)
    sims_clean_up()

    # re-generate the same catalog and verify that its
    # contents are unchanged
    db = MLT_test_DB(database=self.db_name, driver='sqlite')
    obs = ObservationMetaData(mjd=60000.0)
    cat = FlaringCatalog(db, obs_metadata=obs)
    cat._mlt_lc_file = self.mlt_lc_name
    cat_name_2 = os.path.join(self.scratch_dir,'mlt_clean_test_cat_2.txt')
    cat.write_catalog(cat_name_2)

    with open(cat_name_1, 'r') as in_file_1:
        lines_1 = in_file_1.readlines()
    with open(cat_name_2, 'r') as in_file_2:
        lines_2 = in_file_2.readlines()

    # the two catalogs must be non-trivial and contain the same lines
    self.assertGreater(len(lines_1), 1)
    self.assertEqual(len(lines_1), len(lines_2))
    for line in lines_1:
        self.assertIn(line, lines_2)

    # remove the per-test output files
    if os.path.exists(cat_name_1):
        os.unlink(cat_name_1)
    if os.path.exists(cat_name_2):
        os.unlink(cat_name_2)
def tearDownClass(cls):
    """Clear sims caches and remove the MLT light-curve file and database."""
    sims_clean_up()
    for fname in (cls.mlt_lc_name, cls.db_name):
        if os.path.exists(fname):
            os.unlink(fname)
def test_cache(self):
    """
    Verify that EBVbase() loads each dust map into the shared cache
    exactly once, and that independent instances agree on E(B-V) values.
    """
    sims_clean_up()
    self.assertEqual(len(EBVbase._ebv_map_cache), 0)

    first = EBVbase()
    first.load_ebvMapNorth()
    first.load_ebvMapSouth()
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)

    # a second instance must reuse the two cached maps
    second = EBVbase()
    second.load_ebvMapNorth()
    second.load_ebvMapSouth()
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)

    rng = np.random.RandomState(881)
    ra = rng.random_sample(10) * 2.0 * np.pi
    dec = rng.random_sample(10) * np.pi - 0.5 * np.pi

    vals_first = first.calculateEbv(
        equatorialCoordinates=np.array([ra, dec]))
    vals_second = second.calculateEbv(
        equatorialCoordinates=np.array([ra, dec]))

    # querying must not have grown the cache
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)
    np.testing.assert_array_equal(vals_first, vals_second)
def test_cache(self):
    """
    Verify that EBVbase() only reads each dust map once: two instances
    share the class-level map cache and return identical results.
    """
    sims_clean_up()
    self.assertEqual(len(EBVbase._ebv_map_cache), 0)

    loader_a = EBVbase()
    loader_a.load_ebvMapNorth()
    loader_a.load_ebvMapSouth()
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)

    # second loader: cache size must stay at two
    loader_b = EBVbase()
    loader_b.load_ebvMapNorth()
    loader_b.load_ebvMapSouth()
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)

    rng = np.random.RandomState(881)
    ra_vals = rng.random_sample(10)*2.0*np.pi
    dec_vals = rng.random_sample(10)*np.pi - 0.5*np.pi

    result_a = loader_a.calculateEbv(equatorialCoordinates=np.array([ra_vals, dec_vals]))
    result_b = loader_b.calculateEbv(equatorialCoordinates=np.array([ra_vals, dec_vals]))

    # no additional maps may have been loaded by the queries
    self.assertEqual(len(EBVbase._ebv_map_cache), 2)
    np.testing.assert_array_equal(result_a, result_b)
def tearDown(self):
    """Clear sims caches, drop the baseline output, and delete test files."""
    sims_clean_up()
    del self.baselineOutput
    db_file = 'cannotBeNullTest.db'
    if os.path.exists(db_file):
        os.unlink(db_file)
    scratch = self.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Delete the test database and drop the class-level attributes."""
    sims_clean_up()
    db_path = cls.dbName
    if os.path.exists(db_path):
        os.unlink(db_path)
    del cls.dbName, cls.dbSize
def tearDownClass(cls):
    """Remove the test database file and release the class fixtures."""
    sims_clean_up()
    target = cls.dbName
    if os.path.exists(target):
        os.unlink(target)
    del cls.dbName, cls.dbSize
def tearDownClass(cls):
    """Clear sims caches and remove the star and galaxy test databases."""
    sims_clean_up()
    for db_file in (cls.starDBName, cls.galDBName):
        if os.path.exists(db_file):
            os.unlink(db_file)
def test_mlt_clean_up(self):
    """
    Test that the MLT cache is correctly loaded after sims_clean_up
    is called.
    """
    # first pass: write a catalog, populating the MLT light-curve cache
    db = MLT_test_DB(database=self.db_name, driver='sqlite')
    obs = ObservationMetaData(mjd=60000.0)
    cat = FlaringCatalog(db, obs_metadata=obs)
    cat._mlt_lc_file = self.mlt_lc_name
    cat_name_1 = os.path.join(self.scratch_dir,'mlt_clean_test_cat_1.txt')
    cat.write_catalog(cat_name_1)
    sims_clean_up()

    # re-generate the same catalog and verify that its
    # contents are unchanged
    db = MLT_test_DB(database=self.db_name, driver='sqlite')
    obs = ObservationMetaData(mjd=60000.0)
    cat = FlaringCatalog(db, obs_metadata=obs)
    cat._mlt_lc_file = self.mlt_lc_name
    cat_name_2 = os.path.join(self.scratch_dir,'mlt_clean_test_cat_2.txt')
    cat.write_catalog(cat_name_2)

    with open(cat_name_1, 'r') as in_file_1:
        lines_1 = in_file_1.readlines()
    with open(cat_name_2, 'r') as in_file_2:
        lines_2 = in_file_2.readlines()

    # both catalogs must have content and agree line-for-line
    self.assertGreater(len(lines_1), 1)
    self.assertEqual(len(lines_1), len(lines_2))
    for line in lines_1:
        self.assertIn(line, lines_2)

    # clean up the per-test output files
    if os.path.exists(cat_name_1):
        os.unlink(cat_name_1)
    if os.path.exists(cat_name_2):
        os.unlink(cat_name_2)
def test_clean_up(self):
    """
    Test that sims_clean_up behaves as it should by importing
    a test module with some dummy caches, adding things to them,
    and then deleting them.
    """
    from testModules.dummyModule import a_dict_cache
    from testModules.dummyModule import a_list_cache
    from lsst.sims.utils.CodeUtilities import sims_clean_up

    # importing dummyModule registered both caches as clean-up targets
    self.assertEqual(len(sims_clean_up.targets), 2)

    a_dict_cache['a'] = 1
    a_dict_cache['b'] = 2
    a_list_cache.append('alpha')
    a_list_cache.append('beta')
    self.assertEqual(len(a_dict_cache), 2)
    self.assertEqual(len(a_list_cache), 2)

    sims_clean_up()

    # the caches are emptied in place, but remain registered targets
    self.assertEqual(len(a_dict_cache), 0)
    self.assertEqual(len(a_list_cache), 0)
    self.assertEqual(len(sims_clean_up.targets), 2)

    # make sure that re-importing caches does not add second copies
    # to sims_clean_up.targets
    from testModules.dummyModule import a_list_cache
    self.assertEqual(len(sims_clean_up.targets), 2)
def tearDownClass(cls):
    """Remove the star text file, then empty and delete the scratch dir."""
    sims_clean_up()
    if os.path.exists(cls.starTextName):
        os.unlink(cls.starTextName)
    scratch = cls.scratch_dir
    for entry in os.listdir(scratch):
        os.unlink(os.path.join(scratch, entry))
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Drop the class-level DB handles and remove the test database."""
    sims_clean_up()
    for attr in ('bulgeDB', 'diskDB', 'agnDB', 'starDB'):
        delattr(cls, attr)
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
def tearDownClass(cls):
    """Remove the text file, the MLT light-curve file, and the scratch dir."""
    sims_clean_up()
    for fname in (cls.txt_name, cls.mlt_lc_file_name):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratchDir):
        shutil.rmtree(cls.scratchDir)
def tearDownClass(cls):
    """Remove the star database and input dir, then release the camera."""
    sims_clean_up()
    db_path = cls.star_db_name
    if os.path.exists(db_path):
        os.unlink(db_path)
    in_dir = cls.input_dir
    if os.path.exists(in_dir):
        shutil.rmtree(in_dir)
    clean_up_lsst_camera()
def tearDownClass(cls):
    """Clear sims caches and delete all table files and the database."""
    sims_clean_up()
    for fname in (cls.table1FileName, cls.table2FileName, cls.dbName):
        if os.path.exists(fname):
            os.unlink(fname)
def tearDownClass(cls):
    """Delete the star database and input directory; clear the camera cache."""
    sims_clean_up()
    star_db = cls.star_db_name
    if os.path.exists(star_db):
        os.unlink(star_db)
    inputs = cls.input_dir
    if os.path.exists(inputs):
        shutil.rmtree(inputs)
    clean_up_lsst_camera()
def tearDownClass(cls):
    """Clear sims caches and drop the per-class SED directory attributes."""
    sims_clean_up()
    for attr in ('testSpecDir', 'testKDir', 'testMLTDir',
                 'testWDDir', 'kmTestName', 'mTestName'):
        delattr(cls, attr)
def tearDownClass(cls):
    """Remove the star database, the input dir, and the output dir contents."""
    sims_clean_up()
    if os.path.exists(cls.star_db_name):
        os.unlink(cls.star_db_name)
    if os.path.exists(cls.input_dir):
        shutil.rmtree(cls.input_dir)
    out_dir = cls.output_dir
    for entry in os.listdir(out_dir):
        os.unlink(os.path.join(out_dir, entry))
    shutil.rmtree(out_dir)
def tearDownClass(cls):
    """Clear sims caches and drop every SED-related class attribute."""
    sims_clean_up()
    for name in ('testSpecDir', 'testKDir', 'testMLTDir', 'testWDDir',
                 'kmTestName', 'mTestName'):
        delattr(cls, name)
def tearDownClass(cls):
    """Remove the star/galaxy databases and the scratch directory."""
    sims_clean_up()
    for db_file in (cls.starDBName, cls.galDBName):
        if os.path.exists(db_file):
            os.unlink(db_file)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
def tearDownClass(cls):
    """Delete both test databases, then remove the scratch directory."""
    sims_clean_up()
    for fname in (cls.starDBName, cls.galDBName):
        if os.path.exists(fname):
            os.unlink(fname)
    scratch = cls.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Delete the table files and database, then remove the scratch dir."""
    sims_clean_up()
    for fname in (cls.table1FileName, cls.table2FileName, cls.dbName):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
def tearDownClass(cls):
    """Remove the text file, the database, and the base directory."""
    sims_clean_up()
    for fname in (cls.textFileName, cls.dbName):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.baseDir):
        shutil.rmtree(cls.baseDir)
def tearDownClass(cls):
    """Clear sims caches and drop the lazily-built attributes that the
    LSST coordinate-transform functions cache on themselves."""
    sims_clean_up()
    cached = ((chipNameFromPupilCoordsLSST, '_detector_arr'),
              (focalPlaneCoordsFromPupilCoordsLSST, '_z_fitter'),
              (pupilCoordsFromFocalPlaneCoordsLSST, '_z_fitter'),
              (lsst_camera, '_lsst_camera'))
    for func, attr in cached:
        if hasattr(func, attr):
            delattr(func, attr)
def tearDownClass(cls):
    """Drop the DB handle, remove the source database and the scratch dir."""
    sims_clean_up()
    del cls.db
    src = cls.db_src_name
    if os.path.exists(src):
        os.unlink(src)
    scratch = cls.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Release the database handle and clean out all on-disk artifacts."""
    sims_clean_up()
    del cls.db
    if os.path.exists(cls.db_src_name):
        os.unlink(cls.db_src_name)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
def tearDownClass(cls):
    """Remove the MLT light-curve file, the database, and the scratch dir
    (scratch removal is best-effort)."""
    sims_clean_up()
    for fname in (cls.mlt_lc_name, cls.db_name):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir, ignore_errors=True)
def tearDownClass(cls):
    """Delete the test database, then drop all class-level fixtures."""
    sims_clean_up()
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    for attr in ('dbName', 'driver', 'obs_metadata',
                 'bandpassNameList', 'm5', 'seeing'):
        delattr(cls, attr)
def tearDownClass(cls):
    """Delete the test database, then drop every class-level fixture."""
    sims_clean_up()
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    for attr in ('dbName', 'driver', 'host', 'obs_metadata',
                 'totalBandpasses', 'hardwareBandpasses', 'skySeds'):
        delattr(cls, attr)
def tearDownClass(cls):
    """Remove databases and scratch directories, then release the camera."""
    sims_clean_up()
    if os.path.exists(cls.star_db_name):
        os.unlink(cls.star_db_name)
    if os.path.exists(cls.input_dir):
        shutil.rmtree(cls.input_dir)
    out_dir = cls.output_dir
    for entry in os.listdir(out_dir):
        os.unlink(os.path.join(out_dir, entry))
    shutil.rmtree(out_dir)
    clean_up_lsst_camera()
def tearDownClass(cls):
    """Delete all light-curve/database files and the scratch directory."""
    sims_clean_up()
    for fname in (cls.mlt_lc_name, cls.db_name, cls.dummy_lc_name):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
def tearDownClass(cls):
    """Drop the galaxy DB handle, clean the database, and remove every
    catalog file written by the tests."""
    sims_clean_up()
    del cls.galDB
    cls.cleanDB(cls.dbname)
    # valName first, then the per-test files, then the full catalog
    for fname in [cls.valName] + list(cls.fnameList) + [cls.fullCatalog]:
        if os.path.exists(fname):
            os.unlink(fname)
def tearDownClass(cls):
    """Remove the bulge, disk, AGN, and star catalog files."""
    sims_clean_up()
    for fname in (cls.bulge_name, cls.disk_name, cls.agn_name, cls.star_name):
        if os.path.exists(fname):
            os.unlink(fname)
def tearDownClass(cls):
    """Delete on-disk artifacts, then drop all class-level fixtures."""
    sims_clean_up()
    if os.path.exists(cls.dbName):
        os.unlink(cls.dbName)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
    for attr in ('dbName', 'driver', 'obs_metadata',
                 'bandpassNameList', 'm5', 'seeing', 'camera'):
        delattr(cls, attr)
def tearDownClass(cls):
    """Drop the galaxy DB handle, clean the database, remove catalog files,
    and delete the scratch directory (best-effort)."""
    sims_clean_up()
    del cls.galDB
    cls.cleanDB(cls.dbname)
    # valName first, then the per-test files, then the full catalog
    for fname in [cls.valName] + list(cls.fnameList) + [cls.fullCatalog]:
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratchDir):
        shutil.rmtree(cls.scratchDir, ignore_errors=True)
def tearDownClass(cls):
    """Drop the galaxy DB handle, clean the database, remove catalog files,
    and delete the scratch directory."""
    sims_clean_up()
    del cls.galDB
    cls.cleanDB(cls.dbname)
    # valName first, then the per-test files, then the full catalog
    for fname in [cls.valName] + list(cls.fnameList) + [cls.fullCatalog]:
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratchDir):
        shutil.rmtree(cls.scratchDir)
def tearDownClass(cls):
    """Release the camera, remove all catalog files and the data directory."""
    sims_clean_up()
    del cls.camera
    for fname in (cls.bulge_name, cls.disk_name, cls.agn_name, cls.star_name):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.dataDir):
        shutil.rmtree(cls.dataDir)
def tearDownClass(cls):
    """Delete the fake database and the scratch space."""
    sims_clean_up()
    db_path = cls.fake_db_name
    if os.path.exists(db_path):
        os.unlink(db_path)
    scratch = cls.scratch_space
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Delete the variability database and the scratch directory."""
    sims_clean_up()
    db_path = cls.variability_db
    if os.path.exists(db_path):
        os.unlink(db_path)
    scratch = cls.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def test_ParametrizedLightCurve_in_catalog(self):
    """
    Test the performance of applyParametrizedLightCurve() in the context
    of an InstanceCatalog
    """
    # Create dummy light curve parameters
    lc_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog',
                                        suffix='.gz')

    rng = np.random.RandomState(1621145)

    # Fourier components for the first fake light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1)*5.0
    b1_list = (rng.random_sample(n_c_1)-0.5)*2.0
    c1_list = (rng.random_sample(n_c_1)-0.5)*0.1
    omega1_list = rng.random_sample(n_c_1)*20.0
    tau1_list = rng.random_sample(n_c_1)*100.0
    median1 = 100.0

    # Fourier components for the second fake light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2)*5.0
    b2_list = (rng.random_sample(n_c_2)-0.5)*2.0
    c2_list = (rng.random_sample(n_c_2)-0.5)*0.1
    omega2_list = rng.random_sample(n_c_2)*20.0
    tau2_list = rng.random_sample(n_c_2)*100.0
    median2 = 200.0

    # write the two light curves in the parametrized-light-curve
    # library's expected gzipped text format
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr999990000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                            omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')

        out_file.write(b'kplr999990001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                            omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    # Create dummy database of astrophysical sources
    db_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog_db',
                                        suffix='.txt')

    # object 1 deliberately has no light curve assigned
    lc_list = [999990001, None, 999990001, 999990000]
    t0_list = [1729.1, None, 2345.1, 10.9]
    with open(db_temp_file_name, 'w') as out_file:
        out_file.write('# a header\n')
        for i_obj in range(len(lc_list)):
            if lc_list[i_obj] is not None:
                paramStr = '{"m":"kplr", "p":{"lc":%d, "t0":%.3f}}' % (lc_list[i_obj],
                                                                       t0_list[i_obj])
            else:
                paramStr = None
            out_file.write('%d;10.0;20.0;0.01;0.01;%s\n' % (i_obj, paramStr))

    dtype = np.dtype([('simobjid', int), ('ra', float), ('dec', float),
                      ('ebv', float), ('parallax', float),
                      ('varParamStr', str, 100)])

    db = fileDBObject(db_temp_file_name, runtable='test',
                      dtype=dtype, delimiter=';',
                      idColKey='simobjid')

    class ParametrizedVarParamStrCat(InstanceCatalog, VariabilityStars):
        column_outputs = ['simobjid', 'delta_lsst_u', 'delta_lsst_g',
                          'delta_lsst_r', 'delta_lsst_i', 'delta_lsst_z',
                          'delta_lsst_y']
        default_formats = {'f':'%.15g'}

    obs = ObservationMetaData(mjd=59580.0)
    cat = ParametrizedVarParamStrCat(db, obs_metadata=obs)
    cat.load_parametrized_light_curves(lc_temp_file_name)
    cat_out_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_cat_out',
                                   suffix='.txt')
    cat.write_catalog(cat_out_name)

    kp = ParametrizedLightCurveMixin()

    cat_dtype = np.dtype([('simobjid', int), ('du', float), ('dg', float),
                          ('dr', float), ('di', float), ('dz', float),
                          ('dy', float)])

    cat_data = np.genfromtxt(cat_out_name, dtype=cat_dtype, delimiter=', ')

    for i_obj in range(len(cat_data)):
        obj_id = cat_data['simobjid'][i_obj]
        if lc_list[obj_id] is None:
            # objects with no light curve must show zero variability
            self.assertEqual(cat_data['du'][i_obj], 0.0)
            self.assertEqual(cat_data['dg'][i_obj], 0.0)
            self.assertEqual(cat_data['dr'][i_obj], 0.0)
            self.assertEqual(cat_data['di'][i_obj], 0.0)
            self.assertEqual(cat_data['dz'][i_obj], 0.0)
            self.assertEqual(cat_data['dy'][i_obj], 0.0)
        else:
            # compare against _calc_dflux(), which is independently tested
            q_flux, d_flux = kp._calc_dflux(lc_list[obj_id],
                                            obs.mjd.TAI-t0_list[obj_id])
            d_mag_true = -2.5*np.log10(1.0+d_flux/q_flux)
            self.assertGreater(np.abs(d_mag_true), 0.0001)
            self.assertAlmostEqual(cat_data['du'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dg'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dr'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['di'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dz'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dy'][i_obj], d_mag_true, 15)

    # remove temp files and clear caches
    if os.path.exists(cat_out_name):
        os.unlink(cat_out_name)
    if os.path.exists(db_temp_file_name):
        os.unlink(db_temp_file_name)
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def tearDownClass(cls):
    """Remove the variability database, then the scratch directory."""
    sims_clean_up()
    for fname in (cls.variability_db,):
        if os.path.exists(fname):
            os.unlink(fname)
    if os.path.exists(cls.scratch_dir):
        shutil.rmtree(cls.scratch_dir)
def test_ParametrizedLightCurve_in_catalog(self):
    """
    Test the performance of applyParametrizedLightCurve() in the context
    of an InstanceCatalog
    """
    # Create dummy light curve parameters
    lc_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog',
                                        suffix='.gz')

    rng = np.random.RandomState(1621145)

    # Fourier components of the first dummy light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1) * 5.0
    b1_list = (rng.random_sample(n_c_1) - 0.5) * 2.0
    c1_list = (rng.random_sample(n_c_1) - 0.5) * 0.1
    omega1_list = rng.random_sample(n_c_1) * 20.0
    tau1_list = rng.random_sample(n_c_1) * 100.0
    median1 = 100.0

    # Fourier components of the second dummy light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2) * 5.0
    b2_list = (rng.random_sample(n_c_2) - 0.5) * 2.0
    c2_list = (rng.random_sample(n_c_2) - 0.5) * 0.1
    omega2_list = rng.random_sample(n_c_2) * 20.0
    tau2_list = rng.random_sample(n_c_2) * 100.0
    median2 = 200.0

    # serialize both light curves in the library's gzipped text format
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr999990000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0 / (i_c + 1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                            omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')

        out_file.write(b'kplr999990001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0 / (i_c + 1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                            omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    # Create dummy database of astrophysical sources
    db_temp_file_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_catalog_db',
                                        suffix='.txt')

    # second object intentionally has no light curve (None)
    lc_list = [999990001, None, 999990001, 999990000]
    t0_list = [1729.1, None, 2345.1, 10.9]
    with open(db_temp_file_name, 'w') as out_file:
        out_file.write('# a header\n')
        for i_obj in range(len(lc_list)):
            if lc_list[i_obj] is not None:
                paramStr = '{"m":"kplr", "p":{"lc":%d, "t0":%.3f}}' % (
                    lc_list[i_obj], t0_list[i_obj])
            else:
                paramStr = None
            out_file.write('%d;10.0;20.0;0.01;0.01;%s\n' % (i_obj, paramStr))

    dtype = np.dtype([('simobjid', int), ('ra', float), ('dec', float),
                      ('ebv', float), ('parallax', float),
                      ('varParamStr', str, 100)])

    db = fileDBObject(db_temp_file_name, runtable='test',
                      dtype=dtype, delimiter=';',
                      idColKey='simobjid')

    class ParametrizedVarParamStrCat(InstanceCatalog, VariabilityStars):
        column_outputs = [
            'simobjid', 'delta_lsst_u', 'delta_lsst_g', 'delta_lsst_r',
            'delta_lsst_i', 'delta_lsst_z', 'delta_lsst_y'
        ]
        default_formats = {'f': '%.15g'}

    obs = ObservationMetaData(mjd=59580.0)
    cat = ParametrizedVarParamStrCat(db, obs_metadata=obs)
    cat.load_parametrized_light_curves(lc_temp_file_name)
    cat_out_name = tempfile.mktemp(prefix='test_ParametrizedLightCurve_in_cat_out',
                                   suffix='.txt')
    cat.write_catalog(cat_out_name)

    kp = ParametrizedLightCurveMixin()

    cat_dtype = np.dtype([('simobjid', int), ('du', float), ('dg', float),
                          ('dr', float), ('di', float), ('dz', float),
                          ('dy', float)])

    cat_data = np.genfromtxt(cat_out_name, dtype=cat_dtype, delimiter=', ')

    for i_obj in range(len(cat_data)):
        obj_id = cat_data['simobjid'][i_obj]
        if lc_list[obj_id] is None:
            # no light curve => no variability in any band
            self.assertEqual(cat_data['du'][i_obj], 0.0)
            self.assertEqual(cat_data['dg'][i_obj], 0.0)
            self.assertEqual(cat_data['dr'][i_obj], 0.0)
            self.assertEqual(cat_data['di'][i_obj], 0.0)
            self.assertEqual(cat_data['dz'][i_obj], 0.0)
            self.assertEqual(cat_data['dy'][i_obj], 0.0)
        else:
            # truth comes from _calc_dflux(), validated in test_calc_dflux
            q_flux, d_flux = kp._calc_dflux(lc_list[obj_id],
                                            obs.mjd.TAI - t0_list[obj_id])
            d_mag_true = -2.5 * np.log10(1.0 + d_flux / q_flux)
            self.assertGreater(np.abs(d_mag_true), 0.0001)
            self.assertAlmostEqual(cat_data['du'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dg'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dr'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['di'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dz'][i_obj], d_mag_true, 15)
            self.assertAlmostEqual(cat_data['dy'][i_obj], d_mag_true, 15)

    # delete temp files and clear sims caches
    if os.path.exists(cat_out_name):
        os.unlink(cat_out_name)
    if os.path.exists(db_temp_file_name):
        os.unlink(db_temp_file_name)
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def tearDownClass(cls):
    """Remove the input catalog file and the scratch directory."""
    sims_clean_up()
    in_cat = cls.input_cat_name
    if os.path.exists(in_cat):
        os.unlink(in_cat)
    scratch = cls.scratchDir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Clear sims caches and drop the spectrum-directory attribute."""
    sims_clean_up()
    delattr(cls, 'testSpecDir')
def tearDownClass(cls):
    """Clear the sims-wide caches shared by the tests in this class.

    Fix: the first parameter was previously named ``self``; tearDownClass
    is a classmethod, so the conventional (and less misleading) name is
    ``cls``, matching every other tearDownClass in this suite.  The rename
    is purely positional and changes no behavior.
    """
    sims_clean_up()
def tearDownClass(cls):
    """Clear all sims-wide caches accumulated by the tests in this class."""
    sims_clean_up()
def tearDownClass(cls):
    """Remove the test database and release the cached LSST camera."""
    sims_clean_up()
    db_path = cls.db_name
    if os.path.exists(db_path):
        os.unlink(db_path)
    clean_up_lsst_camera()
def test_calc_dflux(self):
    """
    Test the method that calculates the flux of parametrized light curves
    by generating a fake light curve library with known parameters,
    calculating the fluxes, and comparing to the expected results.
    """
    lc_temp_file_name = tempfile.mktemp(prefix='test_calc_dflux_lc',
                                        suffix='.gz')

    rng = np.random.RandomState(7124)

    # Fourier components of the first fake light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1)*5.0
    b1_list = (rng.random_sample(n_c_1)-0.5)*2.0
    c1_list = (rng.random_sample(n_c_1)-0.5)*0.1
    omega1_list = rng.random_sample(n_c_1)*20.0
    tau1_list = rng.random_sample(n_c_1)*100.0
    median1 = 100.0

    # Fourier components of the second fake light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2)*5.0
    b2_list = (rng.random_sample(n_c_2)-0.5)*2.0
    c2_list = (rng.random_sample(n_c_2)-0.5)*0.1
    omega2_list = rng.random_sample(n_c_2)*20.0
    tau2_list = rng.random_sample(n_c_2)*100.0
    median2 = 200.0

    # serialize both light curves in the library's gzipped text format
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr990000000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                            omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')

        out_file.write(b'kplr990000001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                            omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    expmjd = rng.random_sample(100)*200.0

    kp = ParametrizedLightCurveMixin()
    kp.load_parametrized_light_curves(lc_temp_file_name)

    # quiescent flux = median plus the sum of the constant (c) terms
    q_flux, d_flux = kp._calc_dflux(990000000, expmjd)
    self.assertAlmostEqual(q_flux, median1+c1_list.sum(), 10)

    # truth: sum over the Fourier components of the first light curve
    true_flux = np.zeros(len(expmjd))
    for i_c in range(n_c_1):
        arg = omega1_list[i_c]*(expmjd-tau1_list[i_c])
        true_flux += a1_list[i_c]*np.cos(arg)
        true_flux += b1_list[i_c]*np.sin(arg)
    self.assertEqual(len(d_flux), len(true_flux))
    np.testing.assert_allclose(d_flux, true_flux, rtol=0.0, atol=1.0e-10)

    # same check for the second light curve
    q_flux, d_flux = kp._calc_dflux(990000001, expmjd)
    self.assertAlmostEqual(q_flux, median2+c2_list.sum(), 10)

    true_flux = np.zeros(len(expmjd))
    for i_c in range(n_c_2):
        arg = omega2_list[i_c]*(expmjd-tau2_list[i_c])
        true_flux += a2_list[i_c]*np.cos(arg)
        true_flux += b2_list[i_c]*np.sin(arg)
    self.assertEqual(len(d_flux), len(true_flux))
    np.testing.assert_allclose(d_flux, true_flux, rtol=0.0, atol=1.0e-10)

    # clear caches and delete the temp light-curve library
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def tearDownClass(cls):
    """Clear sims caches and delete the OpSim database file."""
    sims_clean_up()
    db_path = cls.opsim_db_name
    if os.path.exists(db_path):
        os.unlink(db_path)
def tearDownClass(cls):
    """Clear sims caches and delete the text catalog file."""
    sims_clean_up()
    cat_path = cls.txt_cat_name
    if os.path.exists(cat_path):
        os.unlink(cat_path)
def tearDownClass(cls):
    """Clear every sims-wide cache used by the tests in this class."""
    sims_clean_up()
def test_applyParametrizedLightCurve_manyExpmjd(self):
    """
    test applyParametrizedLightCurve on an array of expmjd values
    by creating a dummy light curve file with known parameters,
    generating magnitudes, and comparing to the expected outputs.

    We will use _calc_dflux() to calculate the known truth,
    since that method was tested in test_calc_dflux()
    """
    lc_temp_file_name = tempfile.mktemp(prefix='test_applyParametrizedLightCurve_manyexpmjd',
                                        suffix='.gz')

    rng = np.random.RandomState(13291)

    # Fourier components of the first fake light curve
    n_c_1 = 10
    a1_list = rng.random_sample(n_c_1)*5.0
    b1_list = (rng.random_sample(n_c_1)-0.5)*2.0
    c1_list = (rng.random_sample(n_c_1)-0.5)*0.1
    omega1_list = rng.random_sample(n_c_1)*20.0
    tau1_list = rng.random_sample(n_c_1)*100.0
    median1 = 100.0

    # Fourier components of the second fake light curve
    n_c_2 = 15
    a2_list = rng.random_sample(n_c_2)*5.0
    b2_list = (rng.random_sample(n_c_2)-0.5)*2.0
    c2_list = (rng.random_sample(n_c_2)-0.5)*0.1
    omega2_list = rng.random_sample(n_c_2)*20.0
    tau2_list = rng.random_sample(n_c_2)*100.0
    median2 = 200.0

    # serialize both light curves in the library's gzipped text format
    with gzip.open(lc_temp_file_name, 'w') as out_file:
        out_file.write(b'# a header\n')
        out_file.write(b'kplr999900000_lc.txt 100 1.0e+02 %d ' % n_c_1)
        for i_c in range(n_c_1):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median1)
        for i_c in range(n_c_1):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a1_list[i_c], b1_list[i_c], c1_list[i_c],
                            omega1_list[i_c], tau1_list[i_c]))
        out_file.write(b'\n')

        out_file.write(b'kplr999900001_lc.txt 100 1.0e+02 %d ' % n_c_2)
        for i_c in range(n_c_2):
            out_file.write(b'%e ' % (1.0/(i_c+1)))
        out_file.write(b'%e ' % median2)
        for i_c in range(n_c_2):
            out_file.write(b'%.15e %.15e %.15e %.15e %.15e ' %
                           (a2_list[i_c], b2_list[i_c], c2_list[i_c],
                            omega2_list[i_c], tau2_list[i_c]))
        out_file.write(b'\n')

    # object 2 intentionally has no light curve (None)
    params = {}
    params['lc'] = np.array([999900001, 999900000, None, 999900001])
    params['t0'] = np.array([223.1, 1781.45, None, 32.0])

    kp = ParametrizedLightCurveMixin()
    kp.load_parametrized_light_curves(lc_temp_file_name)

    # first test that passing in an empty set of params
    # results in an empty numpy array (so that the 'dry
    # run' of catalog generation does not fail)
    d_mag_out = kp.applyParametrizedLightCurve([],{},1.0)
    np.testing.assert_array_equal(d_mag_out, np.array([[],[],[],[],[],[]]))

    expmjd = rng.random_sample(10)*10000.0 + 59580.0
    d_mag_out = kp.applyParametrizedLightCurve([], params, expmjd)

    # output is (n_filters, n_objects, n_epochs)
    self.assertEqual(d_mag_out.shape, (6, 4, 10))

    for i_obj in range(4):
        if i_obj == 2:
            # the object without a light curve must show zero delta-mag
            for i_filter in range(6):
                np.testing.assert_array_equal(d_mag_out[i_filter][i_obj],
                                              np.zeros(10))
        else:
            # truth via _calc_dflux(), validated in test_calc_dflux()
            q_flux, d_flux = kp._calc_dflux(params['lc'][i_obj],
                                            expmjd-params['t0'][i_obj])
            d_mag_truth = -2.5*np.log10(1.0+d_flux/q_flux)
            nan_vals = np.where(np.isnan(d_mag_truth))
            self.assertEqual(len(nan_vals[0]), 0)
            for i_filter in range(6):
                np.testing.assert_array_equal(d_mag_out[i_filter][i_obj],
                                              d_mag_truth)

    # clear caches and delete the temp light-curve library
    sims_clean_up()
    if os.path.exists(lc_temp_file_name):
        os.unlink(lc_temp_file_name)
def tearDownClass(cls):
    """Clear sims caches and delete the test database."""
    sims_clean_up()
    db_path = cls.testDB
    if os.path.exists(db_path):
        os.unlink(db_path)
def test_catalog_db_object_cacheing(self):
    """
    Test that opening multiple CatalogDBObjects that connect to the same
    database only results in one connection being opened and used.  We
    will test this by instantiating two CatalogDBObjects and a DBObject
    that connect to the same database.  We will then test that the two
    CatalogDBObjects' connections are identical, but that the DBObject
    has its own connection.
    """
    # start from an empty connection cache
    sims_clean_up()
    self.assertEqual(len(CatalogDBObject._connection_cache), 0)

    class DbClass1(CatalogDBObject):
        database = self.db_name
        port = None
        host = None
        driver = 'sqlite'
        tableid = 'test'
        idColKey = 'id'
        objid = 'test_db_class_1'
        columns = [('identification', 'id')]

    class DbClass2(CatalogDBObject):
        database = self.db_name
        port = None
        host = None
        driver = 'sqlite'
        tableid = 'test'
        idColKey = 'id'
        objid = 'test_db_class_2'
        columns = [('other', 'i1')]

    db1 = DbClass1()
    db2 = DbClass2()
    # same database parameters => the exact same connection object
    self.assertEqual(id(db1.connection), id(db2.connection))
    self.assertEqual(len(CatalogDBObject._connection_cache), 1)

    # a plain DBObject does not share the CatalogDBObject cache
    db3 = DBObject(database=self.db_name, driver='sqlite', host=None,
                   port=None)
    self.assertNotEqual(id(db1.connection), id(db3.connection))
    self.assertEqual(len(CatalogDBObject._connection_cache), 1)

    # check that if we had passed db1.connection to a DBObject,
    # the connections would be identical
    db4 = DBObject(connection=db1.connection)
    self.assertEqual(id(db4.connection), id(db1.connection))
    self.assertEqual(len(CatalogDBObject._connection_cache), 1)

    # verify that db1 and db2 are both useable
    results = db1.query_columns(colnames=['id', 'i1', 'i2',
                                          'identification'])
    results = next(results)
    self.assertEqual(len(results), 5)
    np.testing.assert_array_equal(results['id'], list(range(5)))
    np.testing.assert_array_equal(results['id'], results['identification'])
    np.testing.assert_array_equal(results['id']**2, results['i1'])
    np.testing.assert_array_equal(results['id']*(-1), results['i2'])

    results = db2.query_columns(colnames=['id', 'i1', 'i2', 'other'])
    results = next(results)
    self.assertEqual(len(results), 5)
    np.testing.assert_array_equal(results['id'], list(range(5)))
    np.testing.assert_array_equal(results['id']**2, results['i1'])
    np.testing.assert_array_equal(results['i1'], results['other'])
    np.testing.assert_array_equal(results['id']*(-1), results['i2'])
def tearDownClass(cls):
    """Clear sims caches and remove the scratch directory."""
    sims_clean_up()
    scratch = cls.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch)
def tearDownClass(cls):
    """Clear sims caches and remove the scratch directory (best-effort)."""
    sims_clean_up()
    scratch = cls.scratch_dir
    if os.path.exists(scratch):
        shutil.rmtree(scratch, ignore_errors=True)
# Validate the required CLI arguments before doing any work.
if args.out_dir is None:
    raise RuntimeError('must specify out_dir')
if args.log_file is None:
    raise RuntimeError('must specify log file')
# refuse to clobber an existing log from a previous run
if os.path.exists(args.log_file):
    raise RuntimeError('%s already exists' % args.log_file)
if not os.path.exists(args.out_dir):
    os.mkdir(args.out_dir)

# get the list of ObservationMetaData to simulate
obs_gen = ObservationMetaDataGenerator(args.opsim_db, driver='sqlite')
obs_list = obs_gen.getObservationMetaData(night=(args.night0,args.night1))

# release the generator (and its DB connection) before the heavy work
del obs_gen
sims_clean_up()
gc.collect()

# get the list of trixel htmids to simulate
alert_gen = AlertDataGenerator()
alert_gen.subdivide_obs(obs_list, htmid_level=6)

# total number of observations across all trixels
n_tot_obs=0
for htmid in alert_gen.htmid_list:
    n_tot_obs += alert_gen.n_obs(htmid)

# record the per-trixel observation counts plus a summary line
with open(args.log_file, 'a') as out_file:
    for htmid in alert_gen.htmid_list:
        out_file.write('htmid %d n_obs %d\n' % (htmid, alert_gen.n_obs(htmid)))
    out_file.write('n_htmid %d n_obs(total) %d\n' %
                   (len(alert_gen.htmid_list), n_tot_obs))
def tearDownClass(cls):
    """Clear sims caches and drop the class-level fixture attributes."""
    sims_clean_up()
    for attr in ('galDir', 'filterList'):
        delattr(cls, attr)