def testsched(self):
    target_map = standard_goals()['r']
    bfs = []
    bfs.append(basis_functions.M5_diff_basis_function())
    bfs.append(basis_functions.Target_map_basis_function(target_map=target_map))
    weights = np.array([1., 1.])
    survey = surveys.Greedy_survey(bfs, weights)
    scheduler = Core_scheduler([survey])

    observatory = Model_observatory()

    # Check that we can update conditions
    scheduler.update_conditions(observatory.return_conditions())

    # Check that we can get an observation out
    obs = scheduler.request_observation()
    assert obs is not None

    # Check that we can flush the queue
    scheduler.flush_queue()
    assert len(scheduler.queue) == 0

    # Check that we can add an observation
    scheduler.add_observation(obs)
def testBlobs(self):
    """
    Set up a blob selection survey
    """
    nside = 32
    survey_length = 2.0  # days

    surveys = []
    # Set up the DD surveys
    dd_surveys = generate_dd_surveys(nside=nside)
    surveys.append(dd_surveys)

    surveys.append(gen_blob_surveys(nside))
    surveys.append(gen_greedy_surveys(nside))

    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=None)

    # Make sure some blobs executed
    assert 'blob, gg, b' in observations['note']
    assert 'blob, gg, a' in observations['note']
    # assert('blob, u' in observations['note'])

    # Make sure some greedy executed
    assert '' in observations['note']
    # Check that a DD was observed
    assert 'DD:ELAISS1' in observations['note']
    # Make sure a few different filters were observed
    assert len(np.unique(observations['filter'])) > 3
    # Make sure lots of observations executed
    assert observations.size > 1000
    # Make sure nothing tried to look through the earth
    assert np.min(observations['alt']) > 0
def testGreedy(self):
    """
    Set up a greedy survey and run for a few days. A crude way to touch lots of code.
    """
    nside = 32
    survey_length = 2.0  # days
    surveys = gen_greedy_surveys(nside)
    # Deprecating Pairs_survey_scripted
    # surveys.append(Pairs_survey_scripted(None, ignore_obs='DD'))

    # Set up the DD surveys
    dd_surveys = generate_dd_surveys(nside=nside)
    surveys.extend(dd_surveys)

    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=None)

    # Check that greedy observed some
    assert '' in observations['note']
    # Check that a DD was observed
    assert 'DD:ELAISS1' in observations['note']
    # Make sure a few different filters were observed
    assert len(np.unique(observations['filter'])) > 3
    # Make sure lots of observations executed
    assert observations.size > 1000
    # Make sure nothing tried to look through the earth
    assert np.min(observations['alt']) > 0
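# The three test methods above reference `self`, so they presumably live inside a
# test class that is not shown in this excerpt. A minimal sketch of how they might
# be collected and run with unittest; the class name and guard below are
# assumptions for illustration, not part of the original module.
import unittest


class TestFeatureScheduler(unittest.TestCase):
    """Hypothetical wrapper class for the test methods defined above."""

    # testsched, testBlobs, and testGreedy would be defined here as methods,
    # alongside whatever scheduler/observatory imports the module already has.


if __name__ == "__main__":
    unittest.main()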
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, cloud_limit=0.3, downtime=True):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside, cloud_limit=cloud_limit)

    # If we want to remove the scheduled downtime, replace it with a single dummy
    # window in the past so it never overlaps the simulation.
    if not downtime:
        down_starts = [-667]
        down_ends = [-666]
        observatory.downtimes = np.array(list(zip(down_starts, down_ends)),
                                         dtype=list(zip(['start', 'end'], [float, float])))

    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose,
                                                      extra_info=extra_info)
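# Illustrative invocation of the variant above (not from the original script):
# a flat survey list built from the generator helpers used in the tests, one
# simulated year, and the scheduled downtime disabled via the dummy window.
# The fileroot value is made up for the example.
nside = 32
surveys = generate_dd_surveys(nside=nside) + gen_greedy_surveys(nside)
run_sched(surveys, survey_length=365.25, nside=nside,
          fileroot='no_downtime_', downtime=False)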
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose,
                                                      extra_info=extra_info)
def run_sched(surveys, survey_length=None, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40., survey_lengths=None, pause_lengths=None,
              delete_past=True):
    years = np.round(np.sum(survey_lengths) / 365.25)
    filename = fileroot + '%iyrs.db' % years
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside)

    observations = []
    # pause_lengths is expected to hold one fewer entry than survey_lengths;
    # zip stops at the shorter list and the final segment is simulated below.
    for survey_length, pause_length in zip(survey_lengths, pause_lengths):
        observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                           survey_length=survey_length,
                                                           filename=None, delete_past=True,
                                                           n_visit_limit=n_visit_limit,
                                                           verbose=verbose,
                                                           extra_info=extra_info,
                                                           filter_scheduler=filter_sched)
        observatory.mjd += pause_length
        observations.append(observations1)

    # Now for the last set of observations
    survey_length = survey_lengths[-1]
    observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                       survey_length=survey_length,
                                                       filename=None, delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose,
                                                       extra_info=extra_info,
                                                       filter_scheduler=filter_sched)
    observations.append(observations1)

    observations = np.concatenate(observations)
    info = run_info_table(observatory, extra_info=extra_info)
    converter = schema_converter()
    print("Writing combined results to %s" % filename)
    converter.obs2opsim(observations, filename=filename, info=info, delete_past=delete_past)
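# A sketch of calling the multi-segment variant above; the lengths are made-up
# values chosen only to show the convention that pause_lengths carries one fewer
# entry than survey_lengths (the loop consumes the first pair, the final segment
# runs after the loop). The fileroot is illustrative.
run_sched(surveys, nside=32, fileroot='paused_',
          survey_lengths=[365.25, 365.25], pause_lengths=[183.])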
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='greedy_'):
    years = np.round(survey_length / 365.25)
    scheduler = fs.schedulers.Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename=fileroot + '%iyrs.db' % years,
                                                         delete_past=True,
                                                         n_visit_limit=n_visit_limit)
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40.):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside)

    conditions_init = copy.deepcopy(observatory.return_conditions())
    scheduler.update_conditions(observatory.return_conditions())
    desired_obs = scheduler.request_observation(mjd=observatory.mjd)
    print(desired_obs)
    completed_obs, new_night = observatory.observe(desired_obs)
    print(completed_obs)

    # Check if there's any difference between where we wanted to point and where it pointed
    for key in ['RA', 'dec', 'rotSkyPos']:
        print(key, desired_obs[key] - completed_obs[key], desired_obs[key][0])

    conditions_final = observatory.return_conditions()
    print(scheduler.survey_lists[2][-2].fields['dec'][3410])

    # Drop into the debugger for interactive inspection
    import pdb
    pdb.set_trace()
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_cloud11_',
              verbose=False, extra_info=None, illum_limit=40.):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside, cloud_db="./cloud_58967.db",
                                    cloud_offset_year=-11)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose,
                                                      extra_info=extra_info,
                                                      filter_scheduler=filter_sched)
def run_sched(surveys, survey_length=None, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40., sl1=730, sl2=2922, pause_length=183,
              delete_past=True):
    years = np.round((sl1 + sl2 + pause_length) / 365.25)
    filename = fileroot + '%iyrs.db' % years
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside)

    # First segment of the survey
    observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                       survey_length=sl1, filename=None,
                                                       delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose, extra_info=extra_info,
                                                       filter_scheduler=filter_sched)
    # Pause, then the second segment
    observatory.mjd += pause_length
    observatory, scheduler, observations2 = sim_runner(observatory, scheduler,
                                                       survey_length=sl2, filename=None,
                                                       delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose, extra_info=extra_info,
                                                       filter_scheduler=filter_sched)

    observations = np.concatenate([observations1, observations2])
    info = run_info_table(observatory, extra_info=extra_info)
    converter = schema_converter()
    converter.obs2opsim(observations, filename=filename, info=info, delete_past=delete_past)
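# For comparison with the list-based variant, the fixed two-segment version above
# takes scalar lengths. The values below are just its defaults spelled out; the
# total maps to the output name via np.round((sl1 + sl2 + pause_length) / 365.25),
# i.e. '10yrs.db' here. Illustrative only; the fileroot is an assumption.
run_sched(surveys, nside=32, fileroot='baseline_pause_',
          sl1=730, sl2=2922, pause_length=183)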
    try:
        extra_info['git hash'] = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    except subprocess.CalledProcessError:
        extra_info['git hash'] = 'Not in git repo'

    extra_info['file executed'] = os.path.realpath(__file__)

    fileroot = 'weather_%.1f_' % cloud_limit
    file_end = 'v1.6_'

    if scale_down:
        footprints_hp = nes_light_footprints(nside=nside)
        fileroot = fileroot + 'scaleddown_'
    else:
        footprints_hp = standard_goals(nside=nside)

    observatory = Model_observatory(nside=nside)
    conditions = observatory.return_conditions()
    footprints = Footprint(conditions.mjd_start, sun_RA_start=conditions.sun_RA_start,
                           nside=nside)
    for i, key in enumerate(footprints_hp):
        footprints.footprints[i, :] = footprints_hp[key]

    # Set up the DDF surveys to dither
    dither_detailer = detailers.Dither_detailer(per_night=per_night, max_dither=max_dither)
    details = [detailers.Camera_rot_detailer(min_rot=-camera_ddf_rot_limit,
                                             max_rot=camera_ddf_rot_limit),
               dither_detailer]
                                     filtername2=filtername2,
                                     ideal_pair_time=pair_time,
                                     nside=nside,
                                     survey_note=survey_name,
                                     ignore_obs='DD', dither=True))
    return surveys


if __name__ == "__main__":
    nside = 32
    survey_length = 365.25 * 10  # Days
    years = int(survey_length / 365.25)

    greedy = gen_greedy_surveys(nside)
    ddfs = generate_dd_surveys(nside=nside)
    blobs = generate_blobs(nside)
    surveys = [ddfs, blobs, greedy]

    n_visit_limit = None
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename='blob_no_slew_%i.db' % years,
                                                      n_visit_limit=n_visit_limit)
                                             max_dither=max_dither)
    details = [detailers.Camera_rot_detailer(min_rot=-87., max_rot=87.),
               dither_detailer]
    ddfs = generate_dd_surveys(nside=nside, nexp=nexp, detailers=details)
    greedy = gen_greedy_surveys(nside, nexp=nexp)
    blobs = generate_blobs(nside, nexp=nexp, mixed_pairs=True)
    surveys = [ddfs, blobs, greedy]
    scheduler = Core_scheduler(surveys, nside=nside)

    # Replay the previously completed observations into the scheduler
    for obs in observations[0:max_indx]:
        scheduler.add_observation(obs)

    observatory = Model_observatory(nside=nside)
    # Observe the last replayed observation twice: once to slew off the parked
    # position, then again (after resetting the time) to land on the intended pointing.
    observatory.mjd = obs['mjd']
    _ = observatory.observe(obs)
    observatory.mjd = obs['mjd']
    _ = observatory.observe(obs)

    conditions = observatory.return_conditions()
    scheduler.update_conditions(conditions)

    rewards = []
    reward_sizes = []
    for survey_list in scheduler.survey_lists:
        rw = []
        rws = []
        for survey in survey_list:
            reward = survey.calc_reward_function(conditions)
        exec_command += ' ' + arg
    extra_info['exec command'] = exec_command
    extra_info['git hash'] = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    extra_info['file executed'] = os.path.realpath(__file__)

    # Generate some simulated ToOs
    simple_too = False
    if simple_too:
        sim_ToOs = generate_events_simple(nside=nside)
        event_table = None
    else:
        sim_ToOs, event_table = generate_events(nside=nside, survey_length=survey_length,
                                                rate=too_rate)

    observatory = Model_observatory(nside=nside, sim_ToO=sim_ToOs)

    if Pairs:
        if mixedPairs:
            # mixed pairs
            toos = gen_too_surveys(nside=nside, nexp=nexp, nvis=nvis)
            greedy = gen_greedy_surveys(nside, nexp=nexp)
            ddfs = generate_dd_surveys(nside=nside, nexp=nexp)
            blobs = generate_blobs(nside, nexp=nexp, mixed_pairs=True)
            surveys = [toos, ddfs, blobs, greedy]
            run_sched(surveys, observatory, survey_length=survey_length, verbose=verbose,
                      fileroot=os.path.join(outDir, 'too_pairsmix_rate%i_' % too_rate),
                                                      scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose,
                                                      extra_info=extra_info,
                                                      filter_scheduler=filter_sched)


if __name__ == "__main__":
    nside = 32
    scheduler = make_scheduler()
    observatory = Model_observatory(nside=nside)
    sco = schema_converter()
    observations = sco.opsim2obs('baseline_nexp1_v1.6_1yrs.db')

    indx = 64840 - 1
    for obs in observations[0:indx]:
        scheduler.add_observation(obs)

    observatory.mjd = obs['mjd']
    # Observatory starts parked, so need to send an expose command to slew to the correct position
    temp = observatory.observe(obs)
    # It took some time to make that slew, so reset the time again
    observatory.mjd = observations[indx]['mjd']
    should_match_obs, new_night = observatory.observe(obs)

    conditions = observatory.return_conditions()
    scheduler.update_conditions(conditions)

    for survey in scheduler.survey_lists[-1]: