Example #1
    def testBaseline(self):
        """
        Set up a baseline survey and run for a few days. A crude way to touch lots of code.
        """
        nside = fs.set_default_nside(nside=32)

        survey_length = 2.1  # days

        # Define what we want the final visit ratio map to look like
        target_map = fs.standard_goals(nside=nside)
        filters = ['u', 'g', 'r', 'i', 'z', 'y']
        surveys = []

        for filtername in filters:
            bfs = []
            bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
            bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                                    target_map=target_map[filtername],
                                                    out_of_bounds_val=hp.UNSEEN, nside=nside))

            bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
            bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
            bfs.append(fs.Strict_filter_basis_function(filtername=filtername))

            weights = np.array([3.0, 0.3, 1., 3., 3.])
            surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                                   dither=True, nside=nside))

        surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))

        # Set up the DD
        dd_surveys = fs.generate_dd_surveys()
        surveys.extend(dd_surveys)

        scheduler = fs.Core_scheduler(surveys, nside=nside)
        observatory = Speed_observatory(nside=nside)
        observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                             survey_length=survey_length,
                                                             filename=None)

        # Check that a second part of a pair was taken
        assert('scripted' in observations['note'])
        # Check that the COSMOS DD was observed
        assert('DD:COSMOS' in observations['note'])
        # And the u-band
        assert('DD:u,COSMOS' in observations['note'])
        # Make sure a few different filters were observed
        assert(len(np.unique(observations['filter'])) > 3)
        # Make sure lots of observations executed
        assert(observations.size > 1000)
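The snippets on this page are excerpts and omit their imports. A shared preamble consistent with the names used below would look roughly like the following; the exact module paths (especially for Speed_observatory) are assumptions to adapt to your installation:

import time
import numpy as np
import healpy as hp
from numpy.lib.recfunctions import append_fields   # used by the scripted-DD examples below
import lsst.sims.featureScheduler as fs             # assumed alias for the 'fs' package
from lsst.sims.speedObservatory import Speed_observatory   # assumed location

SurveyTopology in Example #8 comes from the SOCS configuration layer; its import is not shown here.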
Example #2
    def testsched(self):
        target_map = fs.standard_goals()['r']

        bfs = []
        bfs.append(fs.M5_diff_basis_function())
        bfs.append(fs.Target_map_basis_function(target_map=target_map))
        weights = np.array([1., 1])
        survey = fs.Simple_greedy_survey_fields(bfs, weights)
        scheduler = fs.Core_scheduler([survey])

        observatory = Speed_observatory()
        # Check that we can update conditions
        scheduler.update_conditions(observatory.return_status())

        # Check that we can get an observation out
        obs = scheduler.request_observation()
        assert (obs is not None)

        # Check that we can flush the Queue
        scheduler.flush_queue()
        assert (len(scheduler.queue) == 0)

        # Check that we can add an observation
        scheduler.add_observation(obs)
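For context, fs.sim_runner automates the hand-shake exercised by this test. A manual loop built only from the methods shown above might look like the sketch below; in the real loop the observatory would execute the request, and the completed observation, not the request itself, would be fed back.

for _ in range(5):
    scheduler.update_conditions(observatory.return_status())
    obs = scheduler.request_observation()
    if obs is None:
        break
    # A real driver would have the observatory attempt `obs` and feed the
    # completed observation back; here the request is returned unchanged.
    scheduler.add_observation(obs)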
Example #3
    target_map = fs.standard_goals()['r']
    filtername = 'r'

    bfs = []
    bfs.append(fs.Depth_percentile_basis_function(filtername=filtername))
    bfs.append(
        fs.Target_map_basis_function(target_map=target_map,
                                     filtername=filtername,
                                     out_of_bounds_val=hp.UNSEEN))
    bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50.))
    bfs.append(fs.Slewtime_basis_function(filtername=filtername))

    weights = np.array([1., 0.2, 1., 2.])
    survey = fs.Greedy_survey_fields(bfs,
                                     weights,
                                     block_size=1,
                                     filtername=filtername)
    scheduler = fs.Core_scheduler([survey])

    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(
        observatory,
        scheduler,
        survey_length=survey_length,
        filename='one_filter_10yr.db',
        delete_past=True)

#real    2218m35.723s  37 hours
#user    2183m17.980s
#sys     9m35.290s
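The .db filename suggests that fs.sim_runner writes the observation log to a SQLite file; if so, the result can be inspected after the run with the standard library (no table names are assumed, they are simply listed):

import sqlite3

con = sqlite3.connect('one_filter_10yr.db')
# List whatever tables sim_runner created in the output file.
tables = [row[0] for row in
          con.execute("SELECT name FROM sqlite_master WHERE type='table'")]
print(tables)
con.close()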
Example #4
                                       filtername='gri'))
    weights = np.array([3.0, 0.3, 0.3, 1., 3., 3., 0., 3.])
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    sv = fs.Greedy_survey_fields(bfs,
                                 weights,
                                 block_size=1,
                                 filtername=filtername,
                                 dither=True,
                                 nside=nside,
                                 ignore_obs='DD')
    greedy_surveys.append(sv)

# Set up the DD surveys
dd_surveys = fs.generate_dd_surveys()

survey_list_o_lists = [dd_surveys, surveys, greedy_surveys]

scheduler = fs.Core_scheduler(survey_list_o_lists, nside=nside)
n_visit_limit = None
observatory = Speed_observatory(nside=nside, quickTest=True)
observatory, scheduler, observations = fs.sim_runner(
    observatory,
    scheduler,
    survey_length=survey_length,
    filename='rolling_mix_%iyrs.db' % years,
    delete_past=True,
    n_visit_limit=n_visit_limit)
t1 = time.time()
delta_t = t1 - t0
print('ran in %.1f min = %.1f hours' % (delta_t / 60., delta_t / 3600.))
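This fragment uses t0, years, and survey_length defined earlier in the original script. A preamble consistent with the filename pattern and with the day-based survey_length used elsewhere on this page would be the following sketch; the ten-year value is only an assumption:

import time

years = 10                       # assumed; matches the 'rolling_mix_%iyrs.db' pattern
survey_length = 365.25 * years   # days, as in the other examples on this page
t0 = time.time()                 # started before the surveys are built and run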
Example #5
        weights = np.array([3.0, 0.4, 1., 2., 3.])
        surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername, dither=True))

    surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))

    # Set up the DD
    dd_survey = fs.Scripted_survey([], [])
    names = ['RA', 'dec', 'mjd', 'filter']
    types = [float, float, float, '|U1']
    observations = np.loadtxt('minion_dd.csv', skiprows=1, dtype=list(zip(names, types)), delimiter=',')
    exptimes = np.zeros(observations.size)
    exptimes.fill(30.)
    observations = append_fields(observations, 'exptime', exptimes)
    nexp = np.zeros(observations.size)
    nexp.fill(2)
    observations = append_fields(observations, 'nexp', nexp)
    notes = np.zeros(observations.size, dtype='|U2')
    notes.fill('DD')
    observations = append_fields(observations, 'note', notes)
    dd_survey.set_script(observations)
    surveys.append(dd_survey)

    scheduler = fs.Core_scheduler(surveys)
    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename='full_survey_%i.db' % years,
                                                         delete_past=True)

# 1 year, 445min = 7hr
# 1 year  383m58.160s
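minion_dd.csv is not included here; from names, types, and the loadtxt call it is a comma-separated file with one header row and RA, dec, mjd, filter columns. An equivalent scripted-observation array can be built in memory as in this sketch (the coordinate values are placeholders, not real deep-drilling fields):

import numpy as np
from numpy.lib.recfunctions import append_fields

names = ['RA', 'dec', 'mjd', 'filter']
types = [float, float, float, '|U1']
observations = np.zeros(2, dtype=list(zip(names, types)))
observations['RA'] = [150.1, 9.45]        # placeholder pointings
observations['dec'] = [2.2, -44.0]
observations['mjd'] = [59853.1, 59853.2]
observations['filter'] = ['r', 'g']
observations = append_fields(observations, 'exptime',
                             np.full(observations.size, 30.))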
Example #6
        surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                               dither=True, nside=nside))

    surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))

    # Set up the DD
    dd_survey = fs.Scripted_survey([], [])
    names = ['RA', 'dec', 'mjd', 'filter']
    types = [float, float, float, '|U1']
    observations = np.loadtxt('minion_dd.csv', skiprows=1, dtype=list(zip(names, types)), delimiter=',')
    exptimes = np.zeros(observations.size)
    exptimes.fill(30.)
    observations = append_fields(observations, 'exptime', exptimes)
    nexp = np.zeros(observations.size)
    nexp.fill(2)
    observations = append_fields(observations, 'nexp', nexp)
    notes = np.zeros(observations.size, dtype='|U2')
    notes.fill('DD')
    observations = append_fields(observations, 'note', notes)
    dd_survey.set_script(observations)
    surveys.append(dd_survey)

    scheduler = fs.Core_scheduler(surveys, nside=nside)
    observatory = Speed_observatory(nside=nside)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename='full_nside32_%i.db' % years,
                                                         delete_past=True)

#  1962m28.940s = 32.7 hr
Example #7
def generate_slair_scheduler():
    nside = fs.set_default_nside(nside=32)
    # get rid of silly northern strip.
    target_map = fs.standard_goals(nside=nside)
    norm_factor = fs.calc_norm_factor(target_map)
    # List to hold all the surveys (for easy plotting later)
    surveys = []

    # Set up observations to be taken in blocks
    filter1s = ['u', 'g', 'r', 'i', 'z', 'y']
    filter2s = [None, 'g', 'r', 'i', None, None]
    pair_surveys = []
    for filtername, filtername2 in zip(filter1s, filter2s):
        bfs = []
        bfs.append(
            fs.M5_diff_basis_function(filtername=filtername, nside=nside))
        if filtername2 is not None:
            bfs.append(
                fs.M5_diff_basis_function(filtername=filtername2, nside=nside))
        bfs.append(
            fs.Target_map_basis_function(filtername=filtername,
                                         target_map=target_map[filtername],
                                         out_of_bounds_val=hp.UNSEEN,
                                         nside=nside,
                                         norm_factor=norm_factor))
        if filtername2 is not None:
            bfs.append(
                fs.Target_map_basis_function(
                    filtername=filtername2,
                    target_map=target_map[filtername2],
                    out_of_bounds_val=hp.UNSEEN,
                    nside=nside,
                    norm_factor=norm_factor))
        bfs.append(
            fs.Slewtime_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
        bfs.append(
            fs.Zenith_shadow_mask_basis_function(nside=nside,
                                                 shadow_minutes=60.,
                                                 max_alt=76.))
        weights = np.array([3.0, 3.0, .3, .3, 3., 3., 0.])
        if filtername2 is None:
            # Need to scale weights up so filter balancing still works properly.
            weights = np.array([6.0, 0.6, 3., 3., 0.])
        # XXX-
        # This is where we could add a look-ahead basis function to include m5_diff in the future.
        # Actually, having a near-future m5 would also help prevent switching to u or g right at twilight?
        # Maybe just need a "filter future" basis function?
        if filtername2 is None:
            survey_name = 'blob, %s' % filtername
        else:
            survey_name = 'blob, %s%s' % (filtername, filtername2)
        surveys.append(
            fs.Blob_survey(bfs,
                           weights,
                           filtername=filtername,
                           filter2=filtername2,
                           survey_note=survey_name))
        pair_surveys.append(surveys[-1])

    # Let's set up some standard surveys as well to fill in the gaps. This is my old silly masked version.
    # It would be good to put in Tiago's version and lift nearly all the masking. That way this can also
    # chase sucker holes.
    filters = ['u', 'g', 'r', 'i', 'z', 'y']
    #filters = ['i', 'z', 'y']
    greedy_surveys = []
    for filtername in filters:
        bfs = []
        bfs.append(
            fs.M5_diff_basis_function(filtername=filtername, nside=nside))
        bfs.append(
            fs.Target_map_basis_function(filtername=filtername,
                                         target_map=target_map[filtername],
                                         out_of_bounds_val=hp.UNSEEN,
                                         nside=nside,
                                         norm_factor=norm_factor))

        bfs.append(
            fs.North_south_patch_basis_function(zenith_min_alt=50.,
                                                nside=nside))
        bfs.append(
            fs.Slewtime_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
        bfs.append(
            fs.Zenith_shadow_mask_basis_function(nside=nside,
                                                 shadow_minutes=60.,
                                                 max_alt=76.))
        weights = np.array([3.0, 0.3, 1., 3., 3., 0.])
        # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
        surveys.append(
            fs.Greedy_survey_fields(bfs,
                                    weights,
                                    block_size=1,
                                    filtername=filtername,
                                    dither=True,
                                    nside=nside,
                                    ignore_obs='DD'))
        greedy_surveys.append(surveys[-1])

    # Set up the DD surveys
    dd_surveys = fs.generate_dd_surveys()
    surveys.extend(dd_surveys)

    survey_list_o_lists = [dd_surveys, pair_surveys, greedy_surveys]

    # put in as list-of-lists so pairs get evaluated first.
    scheduler = fs.Core_scheduler(survey_list_o_lists, nside=nside)
    return scheduler
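generate_slair_scheduler only builds and returns the scheduler. A driver in the style of the other examples on this page could run it as follows; the one-year length and output filename are assumptions:

scheduler = generate_slair_scheduler()
observatory = Speed_observatory(nside=32)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=365.25,
                                                     filename='slair_1yr.db',
                                                     delete_past=True)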
Example #8
                                sequence='u',
                                nvis=[7],
                                survey_name='DD:u,290',
                                reward_value=100,
                                moon_up=False,
                                fraction_limit=0.0012,
                                ha_limits=([0., 0.5], [23.5, 24.]),
                                nside=nside,
                                filter_goals=filter_prop))
    return dd_surveys


# Note: the scheduler driver appears to execute this config module with
# __name__ set to 'config' (not '__main__'), hence the guard below.
if __name__ == 'config':
    survey_topology = SurveyTopology()
    survey_topology.num_general_props = 4
    survey_topology.general_propos = [
        "NorthEclipticSpur", "SouthCelestialPole", "WideFastDeep",
        "GalacticPlane"
    ]
    survey_topology.num_seq_props = 1
    survey_topology.sequence_propos = ["DeepDrillingCosmology1"]

    nside = fs.set_default_nside(nside=32)  # Required

    greedy_surveys = generate_greedy(nside)
    blob_surveys = generate_blob_surveys(nside)
    dd_surveys = generate_dd(nside)

    scheduler = fs.Core_scheduler([dd_surveys, blob_surveys, greedy_surveys],
                                  nside=nside)  # Required