Example #1
def make_refcat(opsim_db, obsHistID, boundLength, outfile,
                catsim_db_info=None, chunk_size=20000):
    """
    Create a reference catalog of stars to use for astrometry from the
    CatSim db tables.

    Parameters
    ----------
    opsim_db : str
        OpSim database sqlite file
    obsHistID : int
        Visit number to provide the center of the extraction region.
    boundLength : float
        Radius of the extraction region in units of degrees.
    outfile : str
        Filename for the reference catalog output file.
    catsim_db_info : dict, optional
        Connection information (host, port, database, driver) for the CatSim
        database.  Default: connection info for the UW fatboy server.
    chunk_size : int, optional
        The memory chunk size to pass to InstanceCatalog.write_catalog
    """
    if catsim_db_info is None:
        catsim_db_info = catsim_uw
    generator = ObservationMetaDataGenerator(database=opsim_db, driver='sqlite')
    obs_metadata = generator.getObservationMetaData(obsHistID=obsHistID,
                                                    boundLength=boundLength)[0]
    stars = CatalogDBObject.from_objid('allstars', **catsim_db_info)
    ref_stars = SimulationReference(stars, obs_metadata=obs_metadata)
    ref_stars.write_catalog(outfile, write_mode='w', write_header=True,
                            chunk_size=chunk_size)
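A minimal usage sketch of make_refcat (not part of the original example): the OpSim filename and visit number below are placeholders, and the connection dictionary simply spells out the (host, port, database, driver) keys described in the docstring, using the fatboy values that appear in other examples in this collection.

# Placeholder inputs; adjust the OpSim file, obsHistID, and radius as needed.
uw_db_info = dict(database='LSSTCATSIM',
                  host='fatboy.phys.washington.edu',
                  port=1433,
                  driver='mssql+pymssql')
make_refcat('opsimblitz1_1133_sqlite.db', obsHistID=230, boundLength=0.3,
            outfile='refcat_visit230.txt', catsim_db_info=uw_db_info)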
Example #2
 def setUp(self):
     self.obsHistID = 1418971
     obs_gen = ObservationMetaDataGenerator(database=os.environ['OPSIMDB'],
                                            driver='sqlite')
     self.obs_md \
         = obs_gen.getObservationMetaData(obsHistID=self.obsHistID)[0]
     self.outfile = 'phosim_instcat_%i.txt' % self.obsHistID
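A hedged companion to the setUp above (not in the original snippet): a tearDown that removes the instance catalog file named in setUp, assuming the test actually wrote it.

 def tearDown(self):
     # Clean up the per-visit instance catalog if it was created.
     if os.path.exists(self.outfile):
         os.remove(self.outfile)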
Example #3
 def _read_pointing_info(self, opsim_db):
     try:
         self.ratel = self.eimage[0].header['RATEL']
         self.dectel = self.eimage[0].header['DECTEL']
         self.rotangle = self.eimage[0].header['ROTANGLE']
         return
     except KeyError:
         if opsim_db is None:
             raise RuntimeError("eimage file does not have pointing info. "
                                "Need an opsim db file.")
     # Read from the opsim db.
     # We need an ObservationMetaData object to use the getRotSkyPos
     # function.
     obs_gen = ObservationMetaDataGenerator(database=opsim_db,
                                            driver="sqlite")
     obs_md = obs_gen.getObservationMetaData(obsHistID=self.visit,
                                             boundType='circle',
                                             boundLength=0)[0]
     # Extract pointing info from opsim db for desired visit.
     conn = sqlite3.connect(opsim_db)
     query = """select descDitheredRA, descDitheredDec,
     descDitheredRotTelPos from summary where
     obshistid={}""".format(self.visit)
     curs = conn.execute(query)
     ra, dec, rottelpos = [np.degrees(x) for x in curs][0]
     conn.close()
     self.ratel, self.dectel = ra, dec
     obs_md.pointingRA = ra
     obs_md.pointingDec = dec
     self.rotangle = getRotSkyPos(ra, dec, obs_md, rottelpos)
Example #4
    def test_query(self):
        """
        Use ObservationMetaDataGenerator to query an OpSim-like database that
        contains dithering columns.  Make sure that the dithering columns get
        carried over into the OpsimMetaData of the resulting ObservationMetaData.
        """

        gen = ObservationMetaDataGenerator(database=self.fake_db_name, driver='sqlite')
        obs_list = gen.getObservationMetaData(fieldRA=(0.0, 180.0))
        self.assertGreater(len(obs_list), 0)
        found_list = []
        for obs in obs_list:
            obsid = obs.OpsimMetaData['obsHistID']
            control_dict = self.db_control[obsid]
            self.assertAlmostEqual(obs._pointingRA, control_dict['ra'], 11)
            self.assertAlmostEqual(obs._pointingDec, control_dict['dec'], 11)
            self.assertAlmostEqual(obs._rotSkyPos, control_dict['rot'], 11)
            self.assertAlmostEqual(obs.OpsimMetaData['m5'], control_dict['m5'], 11)
            self.assertAlmostEqual(obs.OpsimMetaData['raTestDithering'], control_dict['raDith'], 11)
            self.assertAlmostEqual(obs.OpsimMetaData['decTestDithering'], control_dict['decDith'], 11)
            self.assertAlmostEqual(obs.mjd.TAI, control_dict['mjd'], 11)
            self.assertEqual(obs.bandpass, 'g')
            self.assertGreaterEqual(obs.pointingRA, 0.0)
            self.assertLessEqual(obs.pointingRA, 180.0)
            found_list.append(obs.OpsimMetaData['obsHistID'])

        # check that the entries not returned do, in fact, violate the query
        for ix in range(len(self.db_control)):
            if ix not in found_list:
                self.assertGreater(self.db_control[ix]['ra'], np.radians(180.0))
Example #5
    def testCreationOfPhoSimCatalog_3(self):
        """
        Make sure that we can create PhoSim input catalogs using the returned
        ObservationMetaData.

        Test that an error is actually raised if we try to build a PhoSim catalog
        with a v3 header map using a v4 ObservationMetaData
        """

        dbName = tempfile.mktemp(dir=ROOT, prefix='obsMetaDataGeneratorTest-', suffix='.db')
        makePhoSimTestDB(filename=dbName)
        bulgeDB = testGalaxyBulgeDBObj(driver='sqlite', database=dbName)
        opsim_db = os.path.join(getPackageDir('sims_data'), 'OpSimData',
                                'astro-lsst-01_2014.db')
        assert os.path.isfile(opsim_db)
        gen = ObservationMetaDataGenerator(opsim_db, driver='sqlite')
        results = gen.getObservationMetaData(fieldRA=(70.0, 85.0),
                                             telescopeFilter='i')
        self.assertGreater(len(results), 0)
        testCat = PhoSimCatalogSersic2D(bulgeDB, obs_metadata=results[0])
        testCat.phoSimHeaderMap = DefaultPhoSimHeaderMap
        with lsst.utils.tests.getTempFilePath('.txt') as catName:
            with self.assertRaises(RuntimeError):
                testCat.write_catalog(catName)

        if os.path.exists(dbName):
            os.unlink(dbName)
Example #6
    def setUp(self):

        dbPath = os.path.join(getPackageDir('sims_data'),
                              'OpSimData/opsimblitz1_1133_sqlite.db')

        self.gen = ObservationMetaDataGenerator(database=dbPath,
                                                driver='sqlite')
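A hedged illustration of how the generator built in this setUp might be exercised (the test itself is not from the original file; the telescopeFilter/limit query and the bandpass check mirror other examples in this collection):

    def test_filter_query(self):
        # Illustrative only: fetch a few r-band visits and confirm the
        # returned ObservationMetaData carry the requested bandpass.
        obs_list = self.gen.getObservationMetaData(telescopeFilter='r',
                                                   limit=5)
        self.assertGreater(len(obs_list), 0)
        for obs in obs_list:
            self.assertEqual(obs.bandpass, 'r')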
Example #7
    def __init__(self, opsim_db, db_config=None, logger=None):
        """
        Constructor.

        Parameters
        ----------
        opsim_db : str
            sqlite3 db file containing observing plan.

        db_config : dict, optional
            Dictionary of database connection parameters.  If None, the
            parameters for connecting to fatboy.phys.washington.edu from a
            whitelisted machine will be used.

        logger : logging.Logger, optional
            Logger object.
        """
        self.gen = ObservationMetaDataGenerator(database=opsim_db,
                                                driver='sqlite')
        if db_config is not None:
            self.db_config = db_config
        else:
            self.db_config = dict(database='LSSTCATSIM',
                                  port=1433,
                                  host='fatboy.phys.washington.edu',
                                  driver='mssql+pymssql')
        if logger is None:
            logging.basicConfig(format="%(message)s", level=logging.INFO,
                                stream=sys.stdout)
            logger = logging.getLogger()
        self.logger = logger
Example #8
 def __init__(
     self,
     opsim_db='/global/projecta/projectdirs/lsst/groups/SSim/DC2/minion_1016_desc_dithered_v4.db'
 ):
     self.conn = sqlite3.connect(opsim_db)
     self.obs_gen = ObservationMetaDataGenerator(database=opsim_db,
                                                 driver='sqlite')
     self._cache = dict()
Example #9
 def setUpClass(cls):
     opsimdb = os.path.join(getPackageDir('sims_data'), 'OpSimData',
                            'opsimblitz1_1133_sqlite.db')
     obs_gen = ObservationMetaDataGenerator(opsimdb)
     cls.obs_dict = {}
     for band in 'ugrizy':
         obs_list = obs_gen.getObservationMetaData(telescopeFilter=band, limit=10)
         assert len(obs_list) > 0
         cls.obs_dict[band] = obs_list[0]
Example #10
    def __init__(self, catalogdb, opsimdb, opsimdriver="sqlite"):
        self._generator = ObservationMetaDataGenerator(database=opsimdb,
                                                       driver=opsimdriver)

        self._catalogdb = catalogdb

        # optional constraint on query to catalog database
        # (usually 'varParamStr IS NOT NULL')
        if not hasattr(self, '_constraint'):
            self._constraint = None
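A hedged sketch of how a subclass might supply the optional constraint mentioned in the comment above; LightCurveGeneratorBase is a stand-in name for whatever class the __init__ shown here belongs to.

class VariableOnlyGenerator(LightCurveGeneratorBase):
    # Hypothetical subclass: define _constraint before the base __init__
    # runs, so the hasattr check above leaves it in place.
    def __init__(self, catalogdb, opsimdb, opsimdriver="sqlite"):
        self._constraint = 'varParamStr IS NOT NULL'
        super().__init__(catalogdb, opsimdb, opsimdriver=opsimdriver)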
Example #11
    def test_sne_multiband_light_curves(self):
        """
        Generate some supernova light curves.  Verify that they produce the same
        magnitudes and uncertainties as the corresponding supernova catalogs.  Use multiband light curves.
        """

        gen = SNIaLightCurveGenerator(self.db, self.opsimDb)

        raRange = (78.0, 85.0)
        decRange = (-69.0, -65.0)

        pointings = gen.get_pointings(raRange, decRange, bandpass=('r', 'z'))
        gen.sn_universe._midSurveyTime = 49000.0
        gen.sn_universe._snFrequency = 0.001
        self.assertGreater(len(pointings), 1)
        lc_dict, truth = gen.light_curves_from_pointings(pointings)
        self.assertGreater(len(lc_dict), 0)

        obs_gen = ObservationMetaDataGenerator(database=self.opsimDb, driver='sqlite')
        control_obs_r = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange,
                                                       telescopeFilter='r', boundLength=1.75)

        control_obs_z = obs_gen.getObservationMetaData(fieldRA=raRange, fieldDec=decRange,
                                                       telescopeFilter='z', boundLength=1.75)

        self.assertGreater(len(control_obs_r), 0)
        self.assertGreater(len(control_obs_z), 0)

        ct_r = 0
        for obs in control_obs_r:
            cat = SNIaLightCurveControlCatalog(self.db, obs_metadata=obs)
            for sn in cat.iter_catalog():
                if sn[1] > 0.0:
                    ct_r += 1
                    lc = lc_dict[sn[0]]['r']
                    dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI))
                    self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7)
                    self.assertLess(np.abs(lc['flux'][dex] - sn[1]), 1.0e-7)
                    self.assertLess(np.abs(lc['error'][dex] - sn[2]), 1.0e-7)

        self.assertGreater(ct_r, 0)

        ct_z = 0
        for obs in control_obs_z:
            cat = SNIaLightCurveControlCatalog(self.db, obs_metadata=obs)
            for sn in cat.iter_catalog():
                if sn[1] > 0.0:
                    ct_z += 1
                    lc = lc_dict[sn[0]]['z']
                    dex = np.argmin(np.abs(lc['mjd'] - obs.mjd.TAI))
                    self.assertLess(np.abs(lc['mjd'][dex] - obs.mjd.TAI), 1.0e-7)
                    self.assertLess(np.abs(lc['flux'][dex] - sn[1]), 1.0e-7)
                    self.assertLess(np.abs(lc['error'][dex] - sn[2]), 1.0e-7)

        self.assertGreater(ct_z, 0)
Example #12
 def _set_obs_md_results(self, opsim_db, fieldRA, fieldDec, boundLength,
                         pickle_file):
     if pickle_file is not None and os.path.isfile(pickle_file):
         with open(pickle_file, 'rb') as handle:
             self.obs_md_results = pickle.load(handle)
     else:
         # Generate the observation metadata from the db file.
         gen = ObservationMetaDataGenerator(database=opsim_db,
                                            driver='sqlite')
         self.obs_md_results = gen.getObservationMetaData(
             fieldRA=fieldRA, fieldDec=fieldDec, boundLength=boundLength)
         if pickle_file is not None:
             with open(pickle_file, 'wb') as handle:
                 pickle.dump(self.obs_md_results, handle)
Example #13
    def testOnNonExistentDatabase(self):
        """
        Test that an exception is raised if you try to connect to and query
        a database that does not exist.
        """

        test_name = 'non_existent.db'
        with self.assertRaises(RuntimeError) as context:
            ObservationMetaDataGenerator(database=test_name, driver='sqlite')

        self.assertEqual(context.exception.args[0],
                         '%s does not exist' % test_name)

        self.assertFalse(os.path.exists(test_name))
Example #14
    def testPassInOtherQuery(self):
        """
        Test that you can pass OpSim pointings generated from another source
        into an ObservationMetaDataGenerator and still get ObservationMetaData
        out
        """

        pointing_list = self.gen.getOpSimRecords(fieldRA=np.degrees(1.370916))
        self.assertGreater(len(pointing_list), 1)
        local_gen = ObservationMetaDataGenerator()
        obs_list = local_gen.ObservationMetaDataFromPointingArray(pointing_list)
        self.assertEqual(len(obs_list), len(pointing_list))

        for pp in pointing_list:
            obs = local_gen.ObservationMetaDataFromPointing(pp)
            self.assertIsInstance(obs, ObservationMetaData)
Example #15
    def testIncompleteDB(self):
        """
        Test that if the mock OpSim database does not have all required columns, an exception
        is raised.
        """
        scratch_dir = os.path.join(getPackageDir('sims_catUtils'), 'tests',
                                   'scratchSpace')
        opsim_db_name = os.path.join(scratch_dir,
                                     'incomplete_mock_opsim_sqlite.db')

        if os.path.exists(opsim_db_name):
            os.unlink(opsim_db_name)

        conn = sqlite3.connect(opsim_db_name)
        c = conn.cursor()
        c.execute('''CREATE TABLE Summary (obsHistID int, expMJD real, '''
                  '''fieldRA real, filter text)''')
        conn.commit()

        rng = np.random.RandomState(77)
        n_pointings = 100
        ra_data = rng.random_sample(n_pointings) * 2.0 * np.pi
        mjd_data = rng.random_sample(n_pointings) * 1000.0 + 59580.0
        filter_dexes = rng.randint(0, 6, n_pointings)
        bands = ('u', 'g', 'r', 'i', 'z', 'y')
        filter_data = []
        for ii in filter_dexes:
            filter_data.append(bands[ii])

        for ii in range(n_pointings):
            cmd = '''INSERT INTO Summary VALUES(%i, %f, %f, '%s')''' % \
                  (ii, mjd_data[ii], ra_data[ii], filter_data[ii])
            c.execute(cmd)
        conn.commit()
        conn.close()

        incomplete_obs_gen = ObservationMetaDataGenerator(
            database=opsim_db_name)

        with self.assertRaises(RuntimeError) as context:
            incomplete_obs_gen.getObservationMetaData(telescopeFilter='r')
        self.assertIn(
            "ObservationMetaDataGenerator requires that the database",
            context.exception.args[0])

        if os.path.exists(opsim_db_name):
            os.unlink(opsim_db_name)
Example #16
    def __init__(self, opsimdb, descqa_catalog, dither=True,
                 min_mag=10, minsource=100, proper_motion=False,
                 imsim_catalog=False):
        """
        Parameters
        ----------
        opsimdb: str
            OpSim db filename.
        descqa_catalog: str
            Name of the DESCQA galaxy catalog.
        dither: bool [True]
            Flag to enable the dithering included in the opsim db file.
        min_mag: float [10]
            Minimum value of the star magnitude at 500nm to include.
        minsource: int [100]
            Minimum number of objects for phosim.py to simulate a chip.
        proper_motion: bool [False]
            Flag to enable application of proper motion to stars.
        imsim_catalog: bool [False]
            Flag to write an imsim-style object catalog.
        """
        if not os.path.exists(opsimdb):
            raise RuntimeError('%s does not exist' % opsimdb)

        self.descqa_catalog = descqa_catalog
        self.dither = dither
        self.min_mag = min_mag
        self.minsource = minsource
        self.proper_motion = proper_motion
        self.imsim_catalog = imsim_catalog

        self.obs_gen = ObservationMetaDataGenerator(database=opsimdb,
                                                    driver='sqlite')

        self.star_db = StarObj(database='LSSTCATSIM',
                               host='fatboy.phys.washington.edu',
                               port=1433, driver='mssql+pymssql')

        self.instcats = get_instance_catalogs(imsim_catalog)
Example #17
    def test_spatial_query(self):
        """
        Test that spatial queries work
        """
        db_dir = os.path.join(getPackageDir('sims_data'), 'OpSimData')
        assert os.path.isdir(db_dir)
        db_file = os.path.join(db_dir, 'astro-lsst-01_2014.db')
        obs_gen = ObservationMetaDataGenerator(db_file)
        obs_list = obs_gen.getObservationMetaData(fieldRA=(20.0, 40.0),
                                                  fieldDec=(-30.0, -10.0))
        self.assertGreater(len(obs_list), 10)
        with sqlite3.connect(db_file) as conn:
            cursor = conn.cursor()
            query = '''SELECT observationId, fieldRA, fieldDec,
                       observationStartMJD, filter
                       FROM SummaryAllProps WHERE
                       fieldRA BETWEEN 20.0 AND 40.0 AND
                       fieldDec BETWEEN -30.0 AND -10.0
                       ORDER BY observationId'''
            control = cursor.execute(query).fetchall()
        self.assertEqual(len(control), len(obs_list))
        for ii in range(len(obs_list)):
            self.assertEqual(obs_list[ii].OpsimMetaData['observationId'],
                             int(control[ii][0]))

            self.assertAlmostEqual(obs_list[ii].pointingRA,
                                   float(control[ii][1]), 10)
            self.assertAlmostEqual(obs_list[ii].pointingDec,
                                   float(control[ii][2]), 10)
            self.assertAlmostEqual(obs_list[ii].mjd.TAI, float(control[ii][3]),
                                   7)
            self.assertEqual(obs_list[ii].bandpass, str(control[ii][4]))

            self.assertGreaterEqual(obs_list[ii].pointingRA, 20.0)
            self.assertLessEqual(obs_list[ii].pointingRA, 40.0)
            self.assertGreaterEqual(obs_list[ii].pointingDec, -30.0)
            self.assertLessEqual(obs_list[ii].pointingDec, -10.0)
Example #18
import numpy as np
import pandas as pd

from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
from lsst.sims.utils import angularSeparation
from collections import OrderedDict as Odict

dbname = '/global/projecta/projectdirs/lsst/groups/SSim/DC2/minion_1016_desc_dithered_v4.db'
ObsMetaData = ObservationMetaDataGenerator(database=dbname)

def main(ramax=58, ramin=56, decmin=-32, decmax=-31, t0=59215, tm=59945):
    res = ObsMetaData.getObservationMetaData(boundLength=2, boundType='circle', 
                                            fieldRA=(ramin-3, ramax+3), 
                                            fieldDec=(decmin-3, decmax+3), 
                                            expMJD=(t0, tm))

    parsed = [Odict(obsmd.summary['OpsimMetaData']) for obsmd in res \
                if obsmd.bandpass in ("g", "r", "i", "z", "y")]
    
    df = pd.DataFrame(parsed)
    X = df[['obsHistID', 'filter', 'FWHMeff', 'descDitheredRA', 
            'descDitheredDec', 'airmass', 'fiveSigmaDepth', 'expMJD']].copy()
    X.descDitheredRA = np.degrees(X.descDitheredRA)
    X.descDitheredDec = np.degrees(X.descDitheredDec)
Example #19
import pickle
import os
import time
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator

opsim_dir = '/global/projecta/projectdirs/lsst/groups/SSim/DC2'
opsim_file = os.path.join(opsim_dir, 'minion_1016_desc_dithered_v4.db')
assert os.path.isfile(opsim_file)

out_file = os.path.join(os.environ['SCRATCH'],
                        'minion_1016_desc_dithered_dict.p')

obs_gen = ObservationMetaDataGenerator(opsim_file)

t_start = time.time()
obs_md = obs_gen.getObservationMetaData(boundLength=2.1,
                                        boundType='circle',
                                        obsHistID=(-10, 1000000000))

print('getting records took %e' % (time.time() - t_start))

out_dict = {}

t_start = time.time()
for obs in obs_md:
    out_dict[obs.OpsimMetaData['obsHistID']] = obs

with open(out_file, 'wb') as handle:
    pickle.dump(out_dict, handle)
print('output took %e' % (time.time() - t_start))
Example #20
    def __init__(self,
                 opsimdb,
                 descqa_catalog,
                 dither=True,
                 min_mag=10,
                 minsource=100,
                 proper_motion=False,
                 protoDC2_ra=0,
                 protoDC2_dec=0,
                 star_db_name=None,
                 sed_lookup_dir=None,
                 agn_db_name=None,
                 agn_threads=1,
                 sn_db_name=None,
                 sprinkler=False,
                 host_image_dir=None,
                 host_data_dir=None,
                 config_dict=None,
                 gzip_threads=3,
                 objects_to_skip=()):
        """
        Parameters
        ----------
        opsimdb: str
            OpSim db filename.
        descqa_catalog: str
            Name of the DESCQA galaxy catalog.
        dither: bool [True]
            Flag to enable the dithering included in the opsim db file.
        min_mag: float [10]
            Minimum value of the star magnitude at 500nm to include.
        minsource: int [100]
            Minimum number of objects for phosim.py to simulate a chip.
        proper_motion: bool [False]
            Flag to enable application of proper motion to stars.
        protoDC2_ra: float [0]
            Desired RA (J2000 degrees) of protoDC2 center.
        protoDC2_dec: float [0]
            Desired Dec (J2000 degrees) of protoDC2 center.
        star_db_name: str [None]
            Filename of the database containing stellar sources
        sed_lookup_dir: str [None]
            Directory where the SED lookup tables reside.
        agn_db_name: str [None]
            Filename of the agn parameter sqlite db file.
        agn_threads: int [1]
            Number of threads to use when simulating AGN variability
        sn_db_name: str [None]
            Filename of the supernova parameter sqlite db file.
        sprinkler: bool [False]
            Flag to enable the Sprinkler.
        host_image_dir: string
            The location of the FITS images of lensed AGN/SNe hosts produced by generate_lensed_hosts_***.py
        host_data_dir: string
            Location of csv file of lensed host data created by the sprinkler
        gzip_threads: int
            The number of gzip jobs that can be started in parallel after
            catalogs are written (default=3)
        objects_to_skip: set-like or list-like [()]
            Collection of object types to skip, e.g., stars, knots, bulges, disks, sne, agn
        """
        self.t_start = time.time()
        if not os.path.exists(opsimdb):
            raise RuntimeError('%s does not exist' % opsimdb)

        self.gzip_threads = gzip_threads

        # load the data for the parametrized light
        # curve stellar variability model into a
        # global cache
        plc = ParametrizedLightCurveMixin()
        plc.load_parametrized_light_curves()

        self.config_dict = config_dict if config_dict is not None else {}

        self.descqa_catalog = descqa_catalog
        self.dither = dither
        self.min_mag = min_mag
        self.minsource = minsource
        self.proper_motion = proper_motion
        self.protoDC2_ra = protoDC2_ra
        self.protoDC2_dec = protoDC2_dec

        self.phot_params = PhotometricParameters(nexp=1, exptime=30)
        self.bp_dict = BandpassDict.loadTotalBandpassesFromFiles()

        self.obs_gen = ObservationMetaDataGenerator(database=opsimdb,
                                                    driver='sqlite')

        if star_db_name is None:
            raise IOError("Need to specify star_db_name")

        if not os.path.isfile(star_db_name):
            raise IOError("%s is not a file\n" % star_db_name +
                          "(This is what you specified for star_db_name.)")

        self.star_db = DC2StarObj(database=star_db_name, driver='sqlite')

        self.sprinkler = sprinkler
        if self.sprinkler and not HAS_TWINKLES:
            raise RuntimeError("You are trying to enable the sprinkler; "
                               "but Twinkles cannot be imported")

        if not os.path.isdir(sed_lookup_dir):
            raise IOError("\n%s\nis not a dir" % sed_lookup_dir)
        self.sed_lookup_dir = sed_lookup_dir

        self._agn_threads = agn_threads
        if agn_db_name is not None:
            if os.path.exists(agn_db_name):
                self.agn_db_name = agn_db_name
            else:
                raise IOError("Path to Proto DC2 AGN database does not exist.")
        else:
            self.agn_db_name = None

        self.sn_db_name = None
        if sn_db_name is not None:
            if os.path.isfile(sn_db_name):
                self.sn_db_name = sn_db_name
            else:
                raise IOError("%s is not a file" % sn_db_name)

        if host_image_dir is None and self.sprinkler is not False:
            raise IOError(
                "Need to specify the name of the host image directory.")
        elif self.sprinkler is not False:
            if os.path.exists(host_image_dir):
                self.host_image_dir = host_image_dir
            else:
                raise IOError("Path to host image directory" +
                              "\n\n%s\n\n" % host_image_dir +
                              "does not exist.")

        if host_data_dir is None and self.sprinkler is not False:
            raise IOError(
                "Need to specify the name of the host data directory.")
        elif self.sprinkler is not False:
            if os.path.exists(host_data_dir):
                self.host_data_dir = host_data_dir
            else:
                raise IOError(
                    "Path to host data directory does not exist.\n\n"
                    "%s\n\n" % host_data_dir)

        self.instcats = get_instance_catalogs()
        object_types = 'stars knots bulges disks sprinkled hosts sne agn'.split()
        if any([_ not in object_types for _ in objects_to_skip]):
            raise RuntimeError(f'objects_to_skip ({objects_to_skip}) '
                               'contains invalid object types')
        self.do_obj_type = {_: _ not in objects_to_skip for _ in object_types}
Example #21
import os

# Imports needed by the rest of this snippet; the lsst.sims module paths
# below follow recent releases and may differ in older versions.
from lsst.utils import getPackageDir
from lsst.sims.catalogs.db import CatalogDBObject
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
from lsst.sims.GalSimInterface import GalSimGalaxies, SNRdocumentPSF
from lsst.sims.GalSimInterface import LSSTCameraWrapper

#if you want to use the actual LSST camera
#from lsst.obs.lsstSim import LsstSimMapper


class testGalSimGalaxies(GalSimGalaxies):
    #only draw images for u and g bands (for speed)
    bandpassNames = ['u', 'g']

    PSF = SNRdocumentPSF()


#select an OpSim pointing
opsimdb = os.path.join(getPackageDir('sims_data'), 'OpSimData',
                       'opsimblitz1_1133_sqlite.db')
obs_gen = ObservationMetaDataGenerator(database=opsimdb, driver='sqlite')
obs_list = obs_gen.getObservationMetaData(obsHistID=10, boundLength=0.05)
obs_metadata = obs_list[0]

#grab a database of galaxies (in this case, galaxy bulges)
gals = CatalogDBObject.from_objid('galaxyBulge')

#now append a bunch of objects with 2D sersic profiles to our output file
galaxy_galSim = testGalSimGalaxies(gals, obs_metadata=obs_metadata)
galaxy_galSim.camera_wrapper = LSSTCameraWrapper()

galaxy_galSim.write_catalog('galSim_bulge_example.txt', chunk_size=10000)
galaxy_galSim.write_images(nameRoot='bulge')
Example #22
    def setUpClass(cls):
        print('setting up %s' % sims_clean_up.targets)

        cls.camera = obs_lsst_phosim.PhosimMapper().camera

        # These represent the dimmest magnitudes at which objects
        # are considered visible in each of the LSST filters
        # (taken from Table 2 of the overview paper)
        cls.obs_mag_cutoff = (23.68, 24.89, 24.43, 24.0, 24.45, 22.60)

        cls.opsim_db = os.path.join(getPackageDir('sims_data'),
                                    'OpSimData',
                                    'opsimblitz1_1133_sqlite.db')

        rng = np.random.RandomState(8123)

        obs_gen = ObservationMetaDataGenerator(database=cls.opsim_db)
        cls.obs_list = obs_gen.getObservationMetaData(night=(0, 2))
        cls.obs_list = rng.choice(cls.obs_list, 10, replace=False)
        fieldid_list = []
        for obs in cls.obs_list:
            fieldid_list.append(obs.OpsimMetaData['fieldID'])

        # make sure we have selected observations such that the
        # same field is revisited more than once
        assert len(np.unique(fieldid_list)) < len(fieldid_list)

        cls.input_dir = tempfile.mkdtemp(prefix='alertDataGen',
                                         dir=ROOT)

        cls.star_db_name = tempfile.mktemp(prefix='alertDataGen_star_db',
                                           dir=cls.input_dir,
                                           suffix='.db')

        conn = sqlite3.connect(cls.star_db_name)
        cursor = conn.cursor()
        cursor.execute('''CREATE TABLE stars
                          (simobjid int, htmid int, ra real, dec real,
                           umag real, gmag real, rmag real,
                           imag real, zmag real, ymag real,
                           px real, pmra real, pmdec real,
                           vrad real, varParamStr text)''')
        conn.commit()

        n_stars = 10

        cls.ra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.dec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        u_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        g_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        r_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        i_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        z_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        y_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.px_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.pmra_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.pmdec_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.vrad_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.amp_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)
        cls.period_truth = np.zeros(n_stars*len(cls.obs_list), dtype=float)

        id_offset = -n_stars
        for obs in cls.obs_list:
            id_offset += n_stars
            ra_0 = obs.pointingRA
            dec_0 = obs.pointingDec
            rr = rng.random_sample(n_stars)
            theta = rng.random_sample(n_stars)*2.0*np.pi
            ra = ra_0 + rr*np.cos(theta)
            dec = dec_0 + rr*np.sin(theta)
            var_period = rng.random_sample(n_stars)*0.25
            var_amp = rng.random_sample(n_stars)*1.0 + 0.01

            subset = rng.randint(0, high=len(var_amp)-1, size=3)
            var_amp[subset[:2]] = 0.0
            var_amp[subset[-1]] = -1.0

            umag = rng.random_sample(n_stars)*5.0 + 15.0
            gmag = rng.random_sample(n_stars)*5.0 + 15.0
            rmag = rng.random_sample(n_stars)*5.0 + 15.0
            imag = rng.random_sample(n_stars)*5.0 + 15.0
            zmag = rng.random_sample(n_stars)*5.0 + 15.0
            ymag = rng.random_sample(n_stars)*5.0 + 15.0
            px = rng.random_sample(n_stars)*0.1  # say it is arcsec
            pmra = rng.random_sample(n_stars)*50.0+100.0  # say it is arcsec/yr
            pmdec = rng.random_sample(n_stars)*50.0+100.0  # say it is arcsec/yr
            vrad = rng.random_sample(n_stars)*600.0 - 300.0

            subset = rng.randint(0, high=n_stars-1, size=3)
            umag[subset] = 40.0
            gmag[subset] = 40.0
            rmag[subset] = 40.0
            imag[subset] = 40.0
            zmag[subset] = 40.0
            ymag[subset] = 40.0

            cls.ra_truth[id_offset:id_offset+n_stars] = np.round(ra, decimals=6)
            cls.dec_truth[id_offset:id_offset+n_stars] = np.round(dec, decimals=6)
            u_truth[id_offset:id_offset+n_stars] = np.round(umag, decimals=4)
            g_truth[id_offset:id_offset+n_stars] = np.round(gmag, decimals=4)
            r_truth[id_offset:id_offset+n_stars] = np.round(rmag, decimals=4)
            i_truth[id_offset:id_offset+n_stars] = np.round(imag, decimals=4)
            z_truth[id_offset:id_offset+n_stars] = np.round(zmag, decimals=4)
            y_truth[id_offset:id_offset+n_stars] = np.round(ymag, decimals=4)
            cls.px_truth[id_offset:id_offset+n_stars] = np.round(px, decimals=4)
            cls.pmra_truth[id_offset:id_offset+n_stars] = np.round(pmra, decimals=4)
            cls.pmdec_truth[id_offset:id_offset+n_stars] = np.round(pmdec, decimals=4)
            cls.vrad_truth[id_offset:id_offset+n_stars] = np.round(vrad, decimals=4)
            cls.amp_truth[id_offset:id_offset+n_stars] = np.round(var_amp, decimals=4)
            cls.period_truth[id_offset:id_offset+n_stars] = np.round(var_period, decimals=4)

            cls.max_str_len = -1

            for i_star in range(n_stars):
                if var_amp[i_star] >= -0.1:
                    varParamStr = ('{"m":"alert_test", "p":{"amp":%.4f, "per": %.4f}}'
                                   % (var_amp[i_star], var_period[i_star]))
                else:
                    varParamStr = 'None'

                if len(varParamStr) > cls.max_str_len:
                    cls.max_str_len = len(varParamStr)

                htmid = findHtmid(ra[i_star], dec[i_star], 21)

                query = ('''INSERT INTO stars VALUES(%d, %d, %.6f, %.6f,
                                                    %.4f, %.4f, %.4f, %.4f, %.4f, %.4f,
                                                    %.4f, %.4f, %.4f, %.4f, '%s')'''
                         % (i_star+id_offset+1, htmid, ra[i_star], dec[i_star],
                            umag[i_star], gmag[i_star], rmag[i_star],
                            imag[i_star], zmag[i_star], ymag[i_star],
                            px[i_star], pmra[i_star], pmdec[i_star],
                            vrad[i_star], varParamStr))

                cursor.execute(query)
        conn.commit()
        conn.close()

        cls.output_dir = tempfile.mkdtemp(dir=ROOT, prefix='alert_gen_output')
        cls.mag0_truth_dict = {}
        cls.mag0_truth_dict[0] = u_truth
        cls.mag0_truth_dict[1] = g_truth
        cls.mag0_truth_dict[2] = r_truth
        cls.mag0_truth_dict[3] = i_truth
        cls.mag0_truth_dict[4] = z_truth
        cls.mag0_truth_dict[5] = y_truth
Example #23
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Generate the reference catalog')
    parser.add_argument('opsimDB', help='OpSim database sqlite file')
    parser.add_argument('-o',
                        '--outfile',
                        type=str,
                        default='twinkles_ref.txt',
                        help='Filename of output reference catalog')
    args = parser.parse_args()

    # ObservationMetaDataGenerator needs the path to an OpSim output
    # database; here it is the sqlite file passed on the command line.
    generator = ObservationMetaDataGenerator(database=args.opsimDB,
                                             driver='sqlite')
    obsMetaDataResults = generator.getObservationMetaData(fieldRA=(53, 54),
                                                          fieldDec=(-29, -27),
                                                          boundLength=0.3)

    # First get the reference catalog
    stars = CatalogDBObject.from_objid('allstars')
    while True:
        try:
            ref_stars = TwinklesReference(stars,
                                          obs_metadata=obsMetaDataResults[0])
            break
        except RuntimeError:
            continue
    ref_stars.write_catalog(args.outfile,
                            write_mode='w',
Example #24
    def test_ssm_catalog_creation(self):

        t = time.time()
        # Fake opsim data.
        database = os.path.join(getPackageDir('SIMS_DATA'),
                                'OpSimData/opsimblitz1_1133_sqlite.db')
        generator = ObservationMetaDataGenerator(database=database,
                                                 driver='sqlite')

        night = 20
        query = 'select min(expMJD), max(expMJD) from summary where night=%d' % (
            night)
        res = generator.opsimdb.execute_arbitrary(query)
        expMJD_min = res[0][0]
        expMJD_max = res[0][1]

        obsMetaDataResults = generator.getObservationMetaData(
            expMJD=(expMJD_min, expMJD_max), limit=3, boundLength=2.2)

        dt, t = dtime(t)
        print('To query opsim database: %f seconds' % (dt))

        write_header = True
        write_mode = 'w'

        try:
            ssmObj = SolarSystemObj()

            for obsMeta in obsMetaDataResults:
                # But moving objects databases are not currently complete for all years.
                # Push forward to night=747.
                # (note that we need the phosim dictionary as well)

                newMJD = 59590.2  # this MJD is artificially chosen to be in the
                # time span of the new baseline simulated survey

                obs = ObservationMetaData(
                    mjd=newMJD,
                    pointingRA=obsMeta.pointingRA,
                    pointingDec=obsMeta.pointingDec,
                    bandpassName=obsMeta.bandpass,
                    rotSkyPos=obsMeta.rotSkyPos,
                    m5=obsMeta.m5[obsMeta.bandpass],
                    seeing=obsMeta.seeing[obsMeta.bandpass],
                    boundLength=obsMeta.boundLength,
                    boundType=obsMeta.boundType)

                obs._OpsimMetaData = {'visitExpTime': 30}

                mySsmDb = ssmCatCamera(ssmObj, obs_metadata=obs)
                photParams = PhotometricParameters(
                    exptime=obs.OpsimMetaData['visitExpTime'],
                    nexp=1,
                    bandpass=obs.bandpass)
                mySsmDb.photParams = photParams

                try:
                    with lsst.utils.tests.getTempFilePath(
                            '.txt') as output_cat:
                        mySsmDb.write_catalog(output_cat,
                                              write_header=write_header,
                                              write_mode=write_mode)

                        # verify that we did not write an empty catalog
                        with open(output_cat, 'r') as input_file:
                            lines = input_file.readlines()
                        msg = 'MJD is %.3f' % obs.mjd.TAI
                        self.assertGreater(len(lines), 1, msg=msg)
                except:
                    # This is because the solar system object 'tables'
                    # don't actually connect to tables on fatboy; they just
                    # call methods stored on fatboy.  Therefore, the connection
                    # failure will not be noticed until this part of the test
                    msg = sys.exc_info()[1].args[0]
                    if 'DB-Lib error' in msg:
                        reassure()
                        continue
                    else:
                        raise

                write_mode = 'a'
                write_header = False

                dt, t = dtime(t)
                print(
                    'To query solar system objects: %f seconds (obs MJD time %f)'
                    % (dt, obs.mjd.TAI))

        except:
            trace = traceback.extract_tb(sys.exc_info()[2], limit=20)
            msg = sys.exc_info()[1].args[0]
            if 'Failed to connect' in msg or failedOnFatboy(trace):
                # if the exception was because of a failed connection
                # to fatboy, ignore it.
                reassure()

                pass
            else:
                raise
Example #25
                        help='Path to OpSim database used for survey cadence')

    args = parser.parse_args()

    if args.out_dir is None:
        raise RuntimeError('must specify out_dir')
    if args.log_file is None:
        raise RuntimeError('must specify log file')
    if os.path.exists(args.log_file):
        raise RuntimeError('%s already exists' % args.log_file)

    if not os.path.exists(args.out_dir):
        os.mkdir(args.out_dir)

    # get the list of ObservationMetaData to simulate
    obs_gen = ObservationMetaDataGenerator(args.opsim_db, driver='sqlite')
    obs_list = obs_gen.getObservationMetaData(night=(args.night0, args.night1))

    del obs_gen
    sims_clean_up()
    gc.collect()

    # get the list of trixel htmids to simulate
    alert_gen = AlertDataGenerator()
    alert_gen.subdivide_obs(obs_list, htmid_level=6)

    n_tot_obs = 0
    for htmid in alert_gen.htmid_list:
        n_tot_obs += alert_gen.n_obs(htmid)

    with open(args.log_file, 'a') as out_file:
Example #26
if __name__ == "__main__":

    parser = argparse.ArgumentParser(
        description='Lensed AGN Instance Catalog Generator')
    parser.add_argument('--obs_db', type=str, help='path to the Opsim db')
    parser.add_argument('--obs_id', type=int, default=None,
                        help='obsHistID to generate InstanceCatalog for')
    parser.add_argument('--agn_truth_cat', type=str,
                        help='path to lensed AGN truth catalog')
    parser.add_argument('--file_out', type=str,
                        help='filename of instance catalog written')

    args = parser.parse_args()

    obs_gen = ObservationMetaDataGenerator(database=args.obs_db,
                                           driver='sqlite')

    agn_truth_db = create_engine('sqlite:///%s' % args.agn_truth_cat, echo=False)
    agn_truth_cat = pd.read_sql_table('lensed_agn', agn_truth_db)
    lensed_agn_ic = lensedAgnCat(agn_truth_cat)

    obs_md = get_obs_md(obs_gen, args.obs_id, 2, dither=True)
    obs_time = obs_md.mjd.TAI
    obs_filter = obs_md.bandpass
    print('Writing Instance Catalog for Visit: %i at MJD: %f in Bandpass: %s' % (args.obs_id,
                                                                                 obs_time,
                                                                                 obs_filter))
    d_mag = lensed_agn_ic.calc_agn_dmags(obs_time, obs_filter)
    lensed_agn_ic.output_instance_catalog(d_mag, args.file_out, obs_md)
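The get_obs_md helper called above is imported from elsewhere; the following is only a hedged sketch of what such a helper can look like, reusing the descDithered* pattern from the _read_pointing_info example earlier (column names and behavior are assumptions, not the actual implementation).

import numpy as np

def get_obs_md_sketch(obs_gen, obsHistID, fov=2, dither=True):
    # Fetch the visit's ObservationMetaData and, if requested, overwrite the
    # pointing with the dithered values stored in the OpSim summary table.
    obs_md = obs_gen.getObservationMetaData(obsHistID=obsHistID,
                                            boundType='circle',
                                            boundLength=fov)[0]
    if dither:
        obs_md.pointingRA = np.degrees(obs_md.OpsimMetaData['descDitheredRA'])
        obs_md.pointingDec = np.degrees(obs_md.OpsimMetaData['descDitheredDec'])
    return obs_md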

Example #27
class testGalSimAgn(GalSimAgn):
    bandpassNames = ['u', 'g']

    #defined in galSimInterface/galSimUtilities.py
    PSF = SNRdocumentPSF()

    #If you want to use the LSST camera, uncomment the line below.
    #You can similarly assign any camera object you want here
    #camera = LsstSimMapper().camera



#select an OpSim pointing
opsimdb = os.path.join(getPackageDir('sims_data'), 'OpSimData', 'opsimblitz1_1133_sqlite.db')
obs_gen = ObservationMetaDataGenerator(database=opsimdb)
obs_list = obs_gen.getObservationMetaData(obsHistID=10, boundLength=0.05)
obs_metadata = obs_list[0]

#grab a database of stars
stars = CatalogDBObject.from_objid('allstars')

#now append these stars (point sources) to our output catalog
stars_galSim = testGalSimStars(stars, obs_metadata=obs_metadata)

catName = 'galSim_compound_example.txt'
stars_galSim.write_catalog(catName, chunk_size=100)

print('done with stars')

bulges = CatalogDBObject.from_objid('galaxyBulge')
Example #28
    def __init__(self,
                 opsimdb,
                 descqa_catalog,
                 dither=True,
                 min_mag=10,
                 minsource=100,
                 proper_motion=False,
                 imsim_catalog=False,
                 protoDC2_ra=0,
                 protoDC2_dec=0,
                 agn_db_name=None,
                 sprinkler=False):
        """
        Parameters
        ----------
        opsimdb: str
            OpSim db filename.
        descqa_catalog: str
            Name of the DESCQA galaxy catalog.
        dither: bool [True]
            Flag to enable the dithering included in the opsim db file.
        min_mag: float [10]
            Minimum value of the star magnitude at 500nm to include.
        minsource: int [100]
            Minimum number of objects for phosim.py to simulate a chip.
        proper_motion: bool [False]
            Flag to enable application of proper motion to stars.
        imsim_catalog: bool [False]
            Flag to write an imsim-style object catalog.
        protoDC2_ra: float [0]
            Desired RA (J2000 degrees) of protoDC2 center.
        protoDC2_dec: float [0]
            Desired Dec (J2000 degrees) of protoDC2 center.
        agn_db_name: str [None]
            Filename of the agn parameter sqlite db file.
        sprinkler: bool [False]
            Flag to enable the Sprinkler.
        """
        if not os.path.exists(opsimdb):
            raise RuntimeError('%s does not exist' % opsimdb)

        # load the data for the parametrized light
        # curve stellar variability model into a
        # global cache
        plc = ParametrizedLightCurveMixin()
        plc.load_parametrized_light_curves()

        self.descqa_catalog = descqa_catalog
        self.dither = dither
        self.min_mag = min_mag
        self.minsource = minsource
        self.proper_motion = proper_motion
        self.imsim_catalog = imsim_catalog
        self.protoDC2_ra = protoDC2_ra
        self.protoDC2_dec = protoDC2_dec

        self.phot_params = PhotometricParameters(nexp=1, exptime=30)
        self.bp_dict = BandpassDict.loadTotalBandpassesFromFiles()

        self.obs_gen = ObservationMetaDataGenerator(database=opsimdb,
                                                    driver='sqlite')

        self.star_db = StarObj(database='LSSTCATSIM',
                               host='fatboy.phys.washington.edu',
                               port=1433,
                               driver='mssql+pymssql')

        if agn_db_name is None:
            raise IOError("Need to specify a Proto DC2 AGN database.")
        else:
            if os.path.exists(agn_db_name):
                self.agn_db_name = agn_db_name
            else:
                raise IOError("Path to Proto DC2 AGN database does not exist.")

        self.sprinkler = sprinkler

        self.instcats = get_instance_catalogs(imsim_catalog)
Example #29
        type=str,
        default=os.path.join(getPackageDir('twinkles'), 'data'),
        help='directory containing the source data for the InstanceCatalogs')
    args = parser.parse_args()

    # set the filename default to a sensible value using the obsHistID
    if args.outfile is None:
        args.outfile = phoSimInputFileName(args.visit,
                                           prefix='phosim_input',
                                           suffix='.txt',
                                           location='./')
    # Set up OpSim database
    opSimDBPath = os.path.join(args.OpSimDBDir, args.opsimDB)
    engine = create_engine('sqlite:///' + opSimDBPath)

    obs_gen = ObservationMetaDataGenerator(database=opSimDBPath)
    sql_query = 'SELECT * FROM Summary WHERE ObsHistID == {}'.format(
        args.visit)
    df = pd.read_sql_query(sql_query, engine)
    recs = df.to_records()
    obsMetaDataResults = obs_gen.ObservationMetaDataFromPointingArray(recs)
    obs_metaData = obsMetaDataResults[0]
    sn_sed_file_dir = os.path.join(args.seddir, 'spectra_files')

    availConns = None
    print('will generate pointing for {0} and write to filename {1}'.format(
        obs_metaData._OpsimMetaData['obsHistID'], args.outfile))
    generateSinglePointing(obs_metaData,
                           availableConns=availConns,
                           sntable='TwinkSN_run3',
                           fname=args.outfile,
Example #30
    def setUpClass(cls):

        # Set directory where scratch work will be done
        cls.scratchDir = tempfile.mkdtemp(dir=ROOT, prefix='scratchSpace-')

        # ObsMetaData instance with spatial window within which we will
        # put galaxies in a fake galaxy catalog
        cls.obsMetaDataforCat = ObservationMetaData(boundType='circle',
                                                    boundLength=np.degrees(0.25),
                                                    pointingRA=np.degrees(0.13),
                                                    pointingDec=np.degrees(-1.2),
                                                    bandpassName=['r'], mjd=49350.)

        # Randomly generate self.size Galaxy positions within the spatial window
        # of obsMetaDataforCat
        cls.dbname = os.path.join(cls.scratchDir, 'galcat.db')
        cls.size = 1000
        cls.GalaxyPositionSamps = sample_obsmetadata(obsmetadata=cls.obsMetaDataforCat,
                                                     size=cls.size)

        # Create a galaxy Table overlapping with the obsMetaData Spatial Bounds
        # using positions from the samples above and a database name given by
        # self.dbname
        vals = cls._createFakeGalaxyDB()
        cls.valName = os.path.join(cls.scratchDir, 'valsFromTest.dat')
        with open(cls.valName, 'w') as f:
            for i, v in enumerate(vals[0]):
                f.write(str(np.radians(vals[0][i])) + '  ' + str(np.radians(vals[1][i])) + '\n')

        # fig, ax = plt.subplots()
        # ax.plot(vals[0][:1000], vals[1][: 1000], '.')
        # ax.plot([0.13], [-1.2], 'rs', markersize=8)
        # fig.savefig(os.path.join(cls.scratchDir, 'match_galDBPosns.pdf'))

        # Read it into a CatalogDBObject galDB
        class MyGalaxyCatalog(CatalogDBObject):
            '''
            A CatalogDBObject subclass connecting to a local sqlite database.
            '''

            objid = 'mytestgals'
            tableid = 'gals'
            idColKey = 'id'
            objectTypeId = 0
            appendint = 10000
            database = cls.dbname
            # dbAddress = './testData/galcat.db'
            raColName = 'raJ2000'
            decColName = 'decJ2000'
            driver = 'sqlite'

            # columns required to convert the ra, dec values in degrees
            # to radians again
            columns = [('id', 'id', int),
                       ('raJ2000', 'raJ2000 * PI()/ 180. '),
                       ('decJ2000', 'decJ2000 * PI()/ 180.'),
                       ('redshift', 'redshift')]

        cls.galDB = MyGalaxyCatalog(database=cls.dbname)

        # Generate a set of Observation MetaData Outputs that overlap
        # the galaxies in space
        opsimPath = os.path.join(getPackageDir('sims_data'), 'OpSimData')
        opsimDB = os.path.join(opsimPath, 'opsimblitz1_1133_sqlite.db')

        generator = ObservationMetaDataGenerator(database=opsimDB)
        cls.obsMetaDataResults = generator.getObservationMetaData(limit=100,
                                                                  fieldRA=(5.0, 8.0),
                                                                  fieldDec=(-85., -60.),
                                                                  expMJD=(49300., 49400.),
                                                                  boundLength=0.15,
                                                                  boundType='circle')

        sncatalog = SNIaCatalog(db_obj=cls.galDB,
                                obs_metadata=cls.obsMetaDataResults[6],
                                column_outputs=['t0', 'flux_u', 'flux_g',
                                                'flux_r', 'flux_i', 'flux_z',
                                                'flux_y', 'mag_u', 'mag_g',
                                                'mag_r', 'mag_i', 'mag_z',
                                                'mag_y', 'adu_u', 'adu_g',
                                                'adu_r', 'adu_i', 'adu_z',
                                                'adu_y', 'mwebv'])
        sncatalog.suppressDimSN = True
        sncatalog.midSurveyTime = sncatalog.mjdobs - 20.
        sncatalog.snFrequency = 1.0
        cls.fullCatalog = os.path.join(cls.scratchDir, 'testSNCatalogTest.dat')
        sncatalog.write_catalog(cls.fullCatalog)

        # Create a SNCatalog based on GalDB, and having times of explosions
        #     overlapping the times in obsMetaData
        cls.fnameList = cls._writeManySNCatalogs(cls.obsMetaDataResults)