def target(self, x):
    """Cost function: assign the trial basis weights x to every survey, run a
    short simulation, and return the negative performance measure so a
    minimizer maximizes performance."""
    x[0] = 5  # fix the first weight to reduce redundant solutions
    for survey in self.surveys:
        survey.basis_weights = x
    scheduler = fs.Core_scheduler_cost(self.surveys)
    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=self.survey_length)
    return -1 * fs.simple_performance_measure(observations, self.pref)
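Because target() maps a weight vector to a scalar "negative performance", it can be handed to a black-box optimizer. The sketch below is an assumption, not part of the source: toy_target stands in for the real method (which runs fs.sim_runner internally), and it only shows the calling pattern with scipy.optimize.minimize.

import numpy as np
from scipy.optimize import minimize

def toy_target(x):
    # Stand-in for target(): any callable mapping a weight vector to a scalar works.
    # The real method evaluates a full simulation per call.
    return np.sum((x - np.array([5., 1., 2., 3.]))**2)

x0 = np.ones(4)                                     # initial basis weights
result = minimize(toy_target, x0, method='Powell')  # derivative-free search
print(result.x)                                     # best weights found

Powell is used here only because it needs no gradient; with the real cost function each evaluation is a full sim_runner run, so the number of evaluations matters more than the particular method.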
Example #2
    def testBaseline(self):
        """
        Set up a baseline survey and run for a few days. A crude way to touch lots of code.
        """
        nside = fs.set_default_nside(nside=32)

        survey_length = 2.1  # days

        # Define what we want the final visit ratio map to look like
        target_map = fs.standard_goals(nside=nside)
        filters = ['u', 'g', 'r', 'i', 'z', 'y']
        surveys = []

        for filtername in filters:
            bfs = []
            bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
            bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                                    target_map=target_map[filtername],
                                                    out_of_bounds_val=hp.UNSEEN, nside=nside))

            bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
            bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
            bfs.append(fs.Strict_filter_basis_function(filtername=filtername))

            weights = np.array([3.0, 0.3, 1., 3., 3.])
            surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                                   dither=True, nside=nside))

        surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))

        # Set up the DD
        dd_surveys = fs.generate_dd_surveys()
        surveys.extend(dd_surveys)

        scheduler = fs.Core_scheduler(surveys, nside=nside)
        observatory = Speed_observatory(nside=nside)
        observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                             survey_length=survey_length,
                                                             filename=None)

        # Check that a second part of a pair was taken
        assert('scripted' in observations['note'])
        # Check that the COSMOS DD was observed
        assert('DD:COSMOS' in observations['note'])
        # And the u-band
        assert('DD:u,COSMOS' in observations['note'])
        # Make sure a few different filters were observed
        assert(len(np.unique(observations['filter'])) > 3)
        # Make sure lots of observations executed
        assert(observations.size > 1000)
    def testsched(self):
        target_map = fs.standard_goals()['r']

        bfs = []
        bfs.append(fs.M5_diff_basis_function())
        bfs.append(fs.Target_map_basis_function(target_map=target_map))
        weights = np.array([1., 1])
        survey = fs.Simple_greedy_survey_fields(bfs, weights)
        scheduler = fs.Core_scheduler([survey])

        observatory = Speed_observatory()
        # Check that we can update conditions
        scheduler.update_conditions(observatory.return_status())

        # Check that we can get an observation out
        obs = scheduler.request_observation()
        assert (obs is not None)

        # Check that we can flush the Queue
        scheduler.flush_queue()
        assert (len(scheduler.queue) == 0)

        # Check that we can add an observation
        scheduler.add_observation(obs)
    target_map = fs.standard_goals()['r']
    filtername = 'r'

    bfs = []
    bfs.append(fs.Depth_percentile_basis_function(filtername=filtername))
    bfs.append(
        fs.Target_map_basis_function(target_map=target_map,
                                     filtername=filtername,
                                     out_of_bounds_val=hp.UNSEEN))
    bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50.))
    bfs.append(fs.Slewtime_basis_function(filtername=filtername))

    weights = np.array([1., 0.2, 1., 2.])
    survey = fs.Greedy_survey_fields(bfs,
                                     weights,
                                     block_size=1,
                                     filtername=filtername)
    scheduler = fs.Core_scheduler([survey])

    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(
        observatory,
        scheduler,
        survey_length=survey_length,
        filename='one_filter_10yr.db',
        delete_past=True)

#real    2218m35.723s  37 hours
#user    2183m17.980s
#sys     9m35.290s
Example #5
from lsst.sims.speedObservatory import Speed_observatory
import lsst.sims.featureScheduler as fs
import numpy as np
from blob_same_zmask import generate_slair_scheduler
import time

t0 = time.time()

survey_length = 1.
years = np.round(survey_length/365.25)
nside = fs.set_default_nside(nside=32)
scheduler = generate_slair_scheduler()

observatory = Speed_observatory(nside=nside, quickTest=False)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='blobs_same_zmask%iyrs.db' % years,
                                                     delete_past=True)


trun = time.time() - t0
print('ran in %i seconds = %i minutes = %i hours' % (trun, trun/60., trun/3600.))
Example #6
                                       filtername='gri'))
    weights = np.array([3.0, 0.3, 0.3, 1., 3., 3., 0., 3.])
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    sv = fs.Greedy_survey_fields(bfs,
                                 weights,
                                 block_size=1,
                                 filtername=filtername,
                                 dither=True,
                                 nside=nside,
                                 ignore_obs='DD')
    greedy_surveys.append(sv)

# Set up the DD surveys
dd_surveys = fs.generate_dd_surveys()

survey_list_o_lists = [dd_surveys, surveys, greedy_surveys]

scheduler = fs.Core_scheduler(survey_list_o_lists, nside=nside)
n_visit_limit = None
observatory = Speed_observatory(nside=nside, quickTest=True)
observatory, scheduler, observations = fs.sim_runner(
    observatory,
    scheduler,
    survey_length=survey_length,
    filename='rolling_mix_%iyrs.db' % years,
    delete_past=True,
    n_visit_limit=n_visit_limit)
t1 = time.time()
delta_t = t1 - t0
print('ran in %.1f min = %.1f hours' % (delta_t / 60., delta_t / 3600.))
        surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                               dither=True, nside=nside))

    surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))

    # Set up the DD
    dd_survey = fs.Scripted_survey([], [])
    names = ['RA', 'dec', 'mjd', 'filter']
    types = [float, float, float, '|U1']
    observations = np.loadtxt('minion_dd.csv', skiprows=1, dtype=list(zip(names, types)), delimiter=',')
    exptimes = np.zeros(observations.size)
    exptimes.fill(30.)
    observations = append_fields(observations, 'exptime', exptimes)
    nexp = np.zeros(observations.size)
    nexp.fill(2)
    observations = append_fields(observations, 'nexp', nexp)
    notes = np.zeros(observations.size, dtype='|U2')
    notes.fill('DD')
    observations = append_fields(observations, 'note', notes)
    dd_survey.set_script(observations)
    surveys.append(dd_survey)

    scheduler = fs.Core_scheduler(surveys, nside=nside)
    observatory = Speed_observatory(nside=nside)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename='full_nside32_%i.db' % years,
                                                         delete_past=True)

#  1962m28.940s = 32.7 hr
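The scripted DD block above is structured-array bookkeeping: load the base columns, then bolt on constant exptime, nexp, and note columns before handing the array to set_script. A standalone sketch of that pattern in plain numpy follows; the toy rows are made up, and only append_fields from numpy.lib.recfunctions is the real dependency.

import numpy as np
from numpy.lib.recfunctions import append_fields

# Toy stand-in for the rows loaded from minion_dd.csv
obs = np.array([(10.0, -30.0, 59853.1, 'r'),
                (10.0, -30.0, 59853.2, 'g')],
               dtype=[('RA', float), ('dec', float), ('mjd', float), ('filter', 'U1')])

# Append constant-valued columns; usemask=False keeps a plain structured array
obs = append_fields(obs, 'exptime', np.full(obs.size, 30.), usemask=False)
obs = append_fields(obs, 'nexp', np.full(obs.size, 2), usemask=False)
obs = append_fields(obs, 'note', np.full(obs.size, 'DD', dtype='U2'), usemask=False)

print(obs.dtype.names)  # ('RA', 'dec', 'mjd', 'filter', 'exptime', 'nexp', 'note')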
Example #8
    for filtername in filters:
        bfs = []
        bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                                target_map=target_map[filtername],
                                                out_of_bounds_val=hp.UNSEEN, nside=nside))

        bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
        #bfs.append(fs.Zenith_mask_basis_function(maxAlt=78., penalty=-100, nside=nside))
        bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Strict_filter_basis_function(filtername=filtername))

        weights = np.array([3.0, 0.3, 1., 3., 3.])
        # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
        surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                               dither=True, nside=nside, ignore_obs='DD'))

    surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD', min_alt=20.))

    # Set up the DD
    dd_surveys = fs.generate_dd_surveys()
    surveys.extend(dd_surveys)

    scheduler = fs.Core_scheduler(surveys, nside=nside)
    observatory = Speed_observatory(nside=nside, mjd_start=59853.)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename='my_baseline_newSpeed%iyrs.db' % years,
                                                         delete_past=True)

# real    1751m55.325s = 29 hours