def run_sched(surveys, survey_length=None, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40., survey_lengths=None, pause_lengths=None,
              delete_past=True):
    # Note: survey_length is unused here; the segments come from survey_lengths.
    years = np.round(np.sum(survey_lengths) / 365.25)
    filename = fileroot + '%iyrs.db' % years
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside)
    observations = []
    # Run each segment, then advance the observatory clock through the pause.
    # pause_lengths is expected to have one fewer entry than survey_lengths,
    # since the final segment (handled below) is not followed by a pause.
    for survey_length, pause_length in zip(survey_lengths, pause_lengths):
        observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                           survey_length=survey_length,
                                                           filename=None, delete_past=True,
                                                           n_visit_limit=n_visit_limit,
                                                           verbose=verbose,
                                                           extra_info=extra_info,
                                                           filter_scheduler=filter_sched)
        observatory.mjd += pause_length
        observations.append(observations1)
    # Now for the last set of observations
    survey_length = survey_lengths[-1]
    observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                       survey_length=survey_length,
                                                       filename=None, delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose,
                                                       extra_info=extra_info,
                                                       filter_scheduler=filter_sched)
    observations.append(observations1)
    observations = np.concatenate(observations)
    info = run_info_table(observatory, extra_info=extra_info)
    converter = schema_converter()
    print("Writing combined results to %s" % filename)
    converter.obs2opsim(observations, filename=filename, info=info, delete_past=delete_past)
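# A minimal usage sketch (an assumption, not from the original script): two
# on-sky segments separated by a ~6-month pause. gen_greedy_surveys and the
# segment lengths are illustrative; pause_lengths carries one fewer entry than
# survey_lengths, since the final segment is not followed by a pause.
if __name__ == "__main__":
    surveys = gen_greedy_surveys(nside=32)
    run_sched(surveys, nside=32, fileroot='paused_baseline_',
              survey_lengths=[730., 2922.],  # days on sky per segment
              pause_lengths=[183.])          # days of shutdown between segments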
def testBlobs(self):
    """
    Set up a blob selection survey
    """
    nside = 32
    survey_length = 2.0  # days
    surveys = []
    # Set up the DD
    dd_surveys = generate_dd_surveys(nside=nside)
    surveys.append(dd_surveys)
    surveys.append(gen_blob_surveys(nside))
    surveys.append(gen_greedy_surveys(nside))
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=None)
    # Make sure some blobs executed
    assert ('blob, gg, b' in observations['note'])
    assert ('blob, gg, a' in observations['note'])
    # assert('blob, u' in observations['note'])
    # Make sure some greedy executed
    assert ('' in observations['note'])
    # Check that a DD was observed
    assert ('DD:ELAISS1' in observations['note'])
    # Make sure a few different filters were observed
    assert (len(np.unique(observations['filter'])) > 3)
    # Make sure lots of observations executed
    assert (observations.size > 1000)
    # Make sure nothing tried to look through the earth
    assert (np.min(observations['alt']) > 0)
def testGreedy(self):
    """
    Set up a greedy survey and run for a few days.
    A crude way to touch lots of code.
    """
    nside = 32
    survey_length = 2.0  # days
    surveys = gen_greedy_surveys(nside)
    # Deprecating Pairs_survey_scripted
    # surveys.append(Pairs_survey_scripted(None, ignore_obs='DD'))
    # Set up the DD
    dd_surveys = generate_dd_surveys(nside=nside)
    surveys.extend(dd_surveys)
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=None)
    # Check that greedy observed some
    assert ('' in observations['note'])
    # Check that a DD was observed
    assert ('DD:ELAISS1' in observations['note'])
    # Make sure a few different filters were observed
    assert (len(np.unique(observations['filter'])) > 3)
    # Make sure lots of observations executed
    assert (observations.size > 1000)
    # Make sure nothing tried to look through the earth
    assert (np.min(observations['alt']) > 0)
def run_sched(surveys, survey_length=None, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40., sl1=730, sl2=2922, pause_length=183,
              delete_past=True):
    years = np.round((sl1 + sl2 + pause_length) / 365.25)
    filename = fileroot + '%iyrs.db' % years
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside)
    # First segment, then advance the clock through the pause, then the second segment.
    observatory, scheduler, observations1 = sim_runner(observatory, scheduler,
                                                       survey_length=sl1, filename=None,
                                                       delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose,
                                                       extra_info=extra_info,
                                                       filter_scheduler=filter_sched)
    observatory.mjd += pause_length
    observatory, scheduler, observations2 = sim_runner(observatory, scheduler,
                                                       survey_length=sl2, filename=None,
                                                       delete_past=True,
                                                       n_visit_limit=n_visit_limit,
                                                       verbose=verbose,
                                                       extra_info=extra_info,
                                                       filter_scheduler=filter_sched)
    observations = np.concatenate([observations1, observations2])
    info = run_info_table(observatory, extra_info=extra_info)
    converter = schema_converter()
    converter.obs2opsim(observations, filename=filename, info=info, delete_past=delete_past)
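# Hedged usage sketch: with the defaults (sl1=730, sl2=2922, pause_length=183 days)
# this reproduces a ~10-year run with a half-year shutdown after year two. The
# gen_greedy_surveys builder is an assumption for illustration.
if __name__ == "__main__":
    run_sched(gen_greedy_surveys(nside=32), nside=32, fileroot='pause_after_2yrs_')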
def target(self, x):
    x[0] = 5  # pin the first weight to reduce redundant (rescaled) solutions
    for survey in self.surveys:
        survey.basis_weights = x
    scheduler = fs.Core_scheduler_cost(self.surveys)
    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=self.survey_length)
    # Negate so that minimizing this target maximizes the performance measure
    return -1 * fs.simple_performance_measure(observations, self.pref)
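# Hedged sketch of driving target() with a black-box optimizer. Since target()
# returns the negated performance measure, minimizing it maximizes performance.
# The `opt` instance and starting weights x0 are assumptions for illustration;
# scipy.optimize.minimize is a standard choice for this pattern.
#
#   from scipy.optimize import minimize
#   x0 = np.ones(n_basis_functions)  # target() pins x[0] = 5 internally
#   result = minimize(opt.target, x0, method='Nelder-Mead')
#   best_weights = result.x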
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose, extra_info=extra_info)
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='greedy_'):
    years = np.round(survey_length / 365.25)
    scheduler = fs.schedulers.Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename=fileroot + '%iyrs.db' % years,
                                                         delete_past=True,
                                                         n_visit_limit=n_visit_limit)
def testBaseline(self):
    """
    Set up a baseline survey and run for a few days.
    A crude way to touch lots of code.
    """
    nside = fs.set_default_nside(nside=32)
    survey_length = 2.1  # days
    # Define what we want the final visit ratio map to look like
    target_map = fs.standard_goals(nside=nside)
    filters = ['u', 'g', 'r', 'i', 'z', 'y']
    surveys = []
    for filtername in filters:
        bfs = []
        bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                                target_map=target_map[filtername],
                                                out_of_bounds_val=hp.UNSEEN, nside=nside))
        bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
        bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
        bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
        weights = np.array([3.0, 0.3, 1., 3., 3.])
        surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1,
                                               filtername=filtername, dither=True,
                                               nside=nside))
    surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))
    # Set up the DD
    dd_surveys = fs.generate_dd_surveys()
    surveys.extend(dd_surveys)
    scheduler = fs.Core_scheduler(surveys, nside=nside)
    observatory = Speed_observatory(nside=nside)
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename=None)
    # Check that a second part of a pair was taken
    assert ('scripted' in observations['note'])
    # Check that the COSMOS DD was observed
    assert ('DD:COSMOS' in observations['note'])
    # And the u-band
    assert ('DD:u,COSMOS' in observations['note'])
    # Make sure a few different filters were observed
    assert (len(np.unique(observations['filter'])) > 3)
    # Make sure lots of observations executed
    assert (observations.size > 1000)
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_cloud11_',
              verbose=False, extra_info=None, illum_limit=40.):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = Model_observatory(nside=nside, cloud_db="./cloud_58967.db",
                                    cloud_offset_year=-11)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose, extra_info=extra_info,
                                                      filter_scheduler=filter_sched)
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, cloud_limit=0.3, downtime=True):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    observatory = Model_observatory(nside=nside, cloud_limit=cloud_limit)
    # If we want to remove the downtime, replace the schedule with a single
    # dummy window that ends well before the survey starts, so it never triggers.
    if not downtime:
        down_starts = [-667]
        down_ends = [-666]
        observatory.downtimes = np.array(list(zip(down_starts, down_ends)),
                                         dtype=list(zip(['start', 'end'], [float, float])))
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose, extra_info=extra_info)
def run_sched(surveys, survey_length=365.25, nside=32, fileroot='baseline_', verbose=False,
              extra_info=None, illum_limit=40., scheduled_downtime_db=None):
    years = np.round(survey_length / 365.25)
    scheduler = Core_scheduler(surveys, nside=nside)
    n_visit_limit = None
    filter_sched = simple_filter_sched(illum_limit=illum_limit)
    observatory = AltDownModelObservatory(nside=nside)
    observatory.change_scheduled_downtime(scheduled_downtime_db)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename=fileroot + '%iyrs.db' % years,
                                                      delete_past=True,
                                                      n_visit_limit=n_visit_limit,
                                                      verbose=verbose, extra_info=extra_info,
                                                      filter_scheduler=filter_sched)
for filtername in filters:
    bfs = []
    bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
    bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                            target_map=target_map[filtername],
                                            out_of_bounds_val=hp.UNSEEN, nside=nside))
    bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
    # bfs.append(fs.Zenith_mask_basis_function(maxAlt=78., penalty=-100, nside=nside))
    bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
    bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
    weights = np.array([3.0, 0.3, 1., 3., 3.])
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                           dither=True, nside=nside, ignore_obs='DD'))
surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD', min_alt=20.))
# Set up the DD
dd_surveys = fs.generate_dd_surveys()
surveys.extend(dd_surveys)
scheduler = fs.Core_scheduler(surveys, nside=nside)
observatory = Speed_observatory(nside=nside, mjd_start=59853.)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='my_baseline_newSpeed%iyrs.db' % years,
                                                     delete_past=True)
# real 1751m55.325s = 29 hours
# Define what we want the final visit ratio map to look like
target_map = fs.standard_goals()['r']
filtername = 'r'
bfs = []
bfs.append(fs.M5_diff_basis_function(filtername=filtername))
bfs.append(fs.Target_map_basis_function(target_map=target_map, filtername=filtername,
                                        out_of_bounds_val=hp.UNSEEN))
bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50.))
bfs.append(fs.Slewtime_basis_function(filtername=filtername))
bfs.append(fs.Rolling_mask_basis_function(mask=mask, mjd_start=59580.035))
weights = np.array([1., 0.2, 1., 2., 1.])
survey = fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                 dither=True)
scheduler = fs.Core_scheduler([survey])
observatory = Speed_observatory()
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='rolling_%i.db' % year,
                                                     delete_past=True)
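# The `mask` handed to Rolling_mask_basis_function above is built earlier in
# the original script. A minimal sketch of one plausible construction, assuming
# a boolean HEALPix map that flags a declination band to roll on and off:
import healpy as hp
import numpy as np

nside_sketch = 32  # hypothetical resolution for illustration
theta, phi = hp.pix2ang(nside_sketch, np.arange(hp.nside2npix(nside_sketch)))
dec = np.pi / 2. - theta  # colatitude -> declination (radians)
mask = (dec > np.radians(-60.)) & (dec < np.radians(-30.))  # illustrative band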
                                 filtername='gri'))
    weights = np.array([3.0, 0.3, 0.3, 1., 3., 3., 0., 3.])
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    sv = fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                 dither=True, nside=nside, ignore_obs='DD')
    greedy_surveys.append(sv)
# Set up the DD surveys
dd_surveys = fs.generate_dd_surveys()
survey_list_o_lists = [dd_surveys, surveys, greedy_surveys]
scheduler = fs.Core_scheduler(survey_list_o_lists, nside=nside)
n_visit_limit = None
observatory = Speed_observatory(nside=nside, quickTest=True)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='rolling_mix_%iyrs.db' % years,
                                                     delete_past=True,
                                                     n_visit_limit=n_visit_limit)
t1 = time.time()
delta_t = t1 - t0
print('ran in %.1f min = %.1f hours' % (delta_t / 60., delta_t / 3600.))
for filtername in filters:
    bfs = []
    bfs.append(fs.M5_diff_basis_function(filtername=filtername, nside=nside))
    bfs.append(fs.Target_map_basis_function(filtername=filtername,
                                            target_map=target_map[filtername],
                                            out_of_bounds_val=hp.UNSEEN, nside=nside))
    bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50., nside=nside))
    # bfs.append(fs.Zenith_mask_basis_function(maxAlt=78., penalty=-100, nside=nside))
    bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
    bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
    weights = np.array([3.0, 0.3, 1., 3., 3.])
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                           dither=True, nside=nside, ignore_obs='DD'))
surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD', min_alt=20.))
# Set up the DD
dd_surveys = fs.generate_dd_surveys()
surveys.extend(dd_surveys)
scheduler = fs.Core_scheduler(surveys, nside=nside)
observatory = Speed_observatory(nside=nside)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='feature_baseline_update_%iyrs.db' % years,
                                                     delete_past=True)
# real 1751m55.325s = 29 hours
from lsst.sims.speedObservatory import Speed_observatory
import healpy as hp

if __name__ == "__main__":
    survey_length = 5  # days
    # Define what we want the final visit ratio map to look like
    target_map = fs.standard_goals()['r']
    bfs = []
    bfs.append(fs.Depth_percentile_basis_function())
    bfs.append(fs.Target_map_basis_function(target_map=target_map))
    bfs.append(fs.Quadrant_basis_function())
    bfs.append(fs.Slewtime_basis_function())
    weights = np.array([.5, 1., 1., 1.])
    survey = fs.Simple_greedy_survey_fields(bfs, weights, block_size=1)
    scheduler = fs.Core_scheduler([survey])
    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename='marching_d%i.db' % survey_length,
                                                         delete_past=True)
    # block_size=10, survey length of 365 had a runtime of 163 min. and got 0.26e6
    # observations. So, 10 years would be 27 hours.
    # Going to block_size=1: runtime of 211 min and 0.33e6 observations. So, 35 hours.
    # Not too shabby!
    # Up to 221 min after adding in a few more columns
from lsst.sims.featureScheduler.observatory import Speed_observatory
import os

if __name__ == "__main__":
    filename = 'one_filter.db'
    if os.path.exists(filename):
        os.remove(filename)
    survey_length = 365.  # days
    # Define what we want the final visit ratio map to look like
    target_map = fs.standard_goals()['r']
    # Make a list of the basis functions we want to use
    bfs = []
    # Reward looking at parts of the sky with good 5-sigma depth
    bfs.append(fs.Depth_percentile_basis_function(filtername='r'))
    # Reward parts of the survey that have fallen behind
    bfs.append(fs.Target_map_basis_function(target_map=target_map))
    # Reward smaller slewtimes
    bfs.append(fs.Slewtime_basis_function(filtername='r'))
    weights = np.array([10., 1., 1.])
    # Use just the opsim fields for simplicity
    survey = fs.Simple_greedy_survey_fields(bfs, weights)
    scheduler = fs.Core_scheduler([survey])
    observatory = Speed_observatory()
    observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                         survey_length=survey_length,
                                                         filename=filename)
surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                       dither=True, nside=nside))
surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD'))
# Set up the DD
dd_survey = fs.Scripted_survey([], [])
names = ['RA', 'dec', 'mjd', 'filter']
types = [float, float, float, '|U1']  # was '|1U', which is not a valid numpy string dtype
observations = np.loadtxt('minion_dd.csv', skiprows=1, dtype=list(zip(names, types)),
                          delimiter=',')
exptimes = np.zeros(observations.size)
exptimes.fill(30.)
# append_fields comes from numpy.lib.recfunctions
observations = append_fields(observations, 'exptime', exptimes)
nexp = np.zeros(observations.size)
nexp.fill(2)
observations = append_fields(observations, 'nexp', nexp)
notes = np.zeros(observations.size, dtype='|U2')  # was '|2U'; needs two chars for 'DD'
notes.fill('DD')
observations = append_fields(observations, 'note', notes)
dd_survey.set_script(observations)
surveys.append(dd_survey)
scheduler = fs.Core_scheduler(surveys, nside=nside)
observatory = Speed_observatory(nside=nside)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='full_nside32_%i.db' % years,
                                                     delete_past=True)
# 1962m28.940s = 32.7 hr
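# For reference, minion_dd.csv is expected to supply one scripted DD visit per
# row with the four columns loaded above. A hypothetical first few lines
# (values illustrative, not from the original file):
#
#   RA,dec,mjd,filter
#   150.1,2.18,59853.1,r
#   150.1,2.18,59853.2,g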
                                   filtername2=filtername2, ideal_pair_time=pair_time,
                                   nside=nside, survey_note=survey_name, ignore_obs='DD',
                                   dither=True))
    return surveys


if __name__ == "__main__":
    nside = 32
    survey_length = 365.25 * 10  # Days
    years = int(survey_length / 365.25)
    greedy = gen_greedy_surveys(nside)
    ddfs = generate_dd_surveys(nside=nside)
    blobs = generate_blobs(nside)
    surveys = [ddfs, blobs, greedy]
    n_visit_limit = None
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename='baseline2019_%i.db' % years,
                                                      n_visit_limit=n_visit_limit)
    fs.Target_map_basis_function(filtername='y', target_map=target_map))
# Mask everything but the South
bfs.append(fs.Quadrant_basis_function(quadrants=['S'], azWidth=az_width))
# throw in the depth percentile for good measure
bfs.append(fs.Depth_percentile_basis_function())
weights = np.array([1., 1., 1.])
survey = fs.Marching_army_survey(bfs, weights, npick=40)
scheduler = fs.Core_scheduler([survey])
observatory = Speed_observatory()
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename=outdir + '/y_marching_south_%i.db' % az_width)
title = 'az width %i' % az_width
hp.mollview(scheduler.surveys[0].basis_functions[0].survey_features['N_obs'].feature,
            unit='N Visits', title=title)
plt.savefig(outdir + '/n_viz.pdf')
plt.figure()
_ = plt.hist(observations['slewtime'], bins=50)
plt.xlabel('slewtime (seconds)')
plt.ylabel('Count')
                                          mjd_start=observatory.mjd, nside=nside))
bfs.append(fs.Rolling_mask_basis_function(rolling_mask2, year_mod=3, year_offset=1,
                                          mjd_start=observatory.mjd, nside=nside))
bfs.append(fs.Rolling_mask_basis_function(rolling_mask2, year_mod=3, year_offset=2,
                                          mjd_start=observatory.mjd, nside=nside))
bfs.append(fs.Rolling_mask_basis_function(rolling_mask3, year_mod=3, year_offset=2,
                                          mjd_start=observatory.mjd, nside=nside))
bfs.append(fs.Rolling_mask_basis_function(rolling_mask3, year_mod=3, year_offset=3,
                                          mjd_start=observatory.mjd, nside=nside))
# bfs.append(fs.Zenith_mask_basis_function(maxAlt=78., penalty=-100, nside=nside))
bfs.append(fs.Slewtime_basis_function(filtername=filtername, nside=nside))
bfs.append(fs.Strict_filter_basis_function(filtername=filtername))
weights = np.array([3.0, 0.4, 1., 0., 0., 0., 0., 0., 0., 3., 3.])
surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername,
                                       dither=True, nside=nside))
surveys.append(fs.Pairs_survey_scripted([], [], ignore_obs='DD', min_alt=20.))
# Set up the DD
dd_surveys = fs.generate_dd_surveys()
surveys.extend(dd_surveys)
scheduler = fs.Core_scheduler(surveys, nside=nside)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='feature_rolling_twoThird_%iyrs.db' % years,
                                                     delete_past=True)
from lsst.sims.speedObservatory import Speed_observatory
import lsst.sims.featureScheduler as fs
import numpy as np
from blob_same_zmask import generate_slair_scheduler
import time

t0 = time.time()
survey_length = 1.
years = np.round(survey_length / 365.25)
nside = fs.set_default_nside(nside=32)
scheduler = generate_slair_scheduler()
observatory = Speed_observatory(nside=nside, quickTest=False)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='blobs_same_zmask%iyrs.db' % years,
                                                     delete_past=True)
trun = time.time() - t0
print('ran in %i seconds = %i minutes = %i hours' % (trun, trun / 60., trun / 3600.))
        weights = np.array([6.0, 0.6, 3., 3., 0., 0., 0., 0., 0., 0.])
        if filtername2 is None:
            survey_name = 'blob, %s' % filtername
        else:
            survey_name = 'blob, %s%s' % (filtername, filtername2)
        surveys.append(Blob_survey(bfs, weights, filtername1=filtername,
                                   filtername2=filtername2, ideal_pair_time=pair_time,
                                   nside=nside, survey_note=survey_name, ignore_obs='DD',
                                   dither=True))
    return surveys


if __name__ == "__main__":
    nside = 32
    survey_length = 365.25  # 365.25*10  # Days
    years = int(survey_length / 365.25)
    greedy = gen_greedy_surveys(nside)
    ddfs = generate_dd_surveys(nside=nside)
    blobs = generate_blobs(nside)
    surveys = [ddfs, blobs, greedy]
    n_visit_limit = None
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Mock_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename='cadence_%i.db' % (years),
                                                      n_visit_limit=n_visit_limit)
    # Might want to try ignoring DD observations here, so the DD area gets covered normally--DONE
    surveys.append(fs.Greedy_survey_fields(bfs, weights, block_size=1,
                                           filtername=filtername, dither=True,
                                           nside=nside, ignore_obs='DD'))
    greedy_surveys.append(surveys[-1])
# Set up the DD surveys
dd_surveys = fs.generate_dd_surveys()
surveys.extend(dd_surveys)
survey_list_o_lists = [dd_surveys, pair_surveys, greedy_surveys]
scheduler = fs.Core_scheduler(survey_list_o_lists, nside=nside)
n_visit_limit = None
observatory = Speed_observatory(nside=nside, quickTest=True)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='tight_mask_simple_%iyrs.db' % years,
                                                     delete_past=True,
                                                     n_visit_limit=n_visit_limit)
t1 = time.time()
delta_t = t1 - t0
print('ran in %.1f min = %.1f hours' % (delta_t / 60., delta_t / 3600.))
                                   filtername2=filtername2, ideal_pair_time=pair_time,
                                   nside=nside, survey_note=survey_name, ignore_obs='DD',
                                   dither=True))
    return surveys


if __name__ == "__main__":
    nside = 32
    survey_length = 365.25 * 10  # Days
    years = int(survey_length / 365.25)
    greedy = gen_greedy_surveys(nside)
    ddfs = generate_dd_surveys(nside=nside)
    blobs = generate_blobs(nside)
    surveys = [ddfs, blobs, greedy]
    n_visit_limit = None
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Model_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename='blob_no_slew_%i.db' % years,
                                                      n_visit_limit=n_visit_limit)
                                   filtername2=filtername2, ideal_pair_time=pair_time,
                                   nside=nside, survey_note=survey_name, ignore_obs='DD',
                                   dither=True))
    return surveys


if __name__ == "__main__":
    nside = 32
    survey_length = 365.25  # 365.25*10  # Days
    years = int(survey_length / 365.25)
    greedy = gen_greedy_surveys(nside)
    ddfs = generate_dd_surveys(nside=nside)
    blobs = generate_blobs(nside)
    surveys = [ddfs, blobs, greedy]
    n_visit_limit = None
    scheduler = Core_scheduler(surveys, nside=nside)
    observatory = Mock_observatory(nside=nside)
    observatory, scheduler, observations = sim_runner(observatory, scheduler,
                                                      survey_length=survey_length,
                                                      filename='resTest_nside_%i_%i.db' % (nside, years),
                                                      n_visit_limit=n_visit_limit)
target_map = fs.standard_goals()['r']
filtername = 'r'
bfs = []
bfs.append(fs.Depth_percentile_basis_function(filtername=filtername))
bfs.append(fs.Target_map_basis_function(target_map=target_map, filtername=filtername,
                                        out_of_bounds_val=hp.UNSEEN))
bfs.append(fs.North_south_patch_basis_function(zenith_min_alt=50.))
bfs.append(fs.Slewtime_basis_function(filtername=filtername))
weights = np.array([1., 0.2, 1., 2.])
survey = fs.Greedy_survey_fields(bfs, weights, block_size=1, filtername=filtername)
scheduler = fs.Core_scheduler([survey])
observatory = Speed_observatory()
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='one_filter_10yr.db',
                                                     delete_past=True)
# real 2218m35.723s (~37 hours)
# user 2183m17.980s
# sys  9m35.290s
    2. + 10. / 60. + 55 / 3600., sequence='rgizy', nvis=[20, 10, 20, 26, 20],
    survey_name='DD:COSMOS', reward_value=100, moon_up=None, fraction_limit=0.0185,
    ha_limits=([0., 0.5], [23.5, 24.]), nside=nside))
surveys.append(fs.Deep_drilling_survey(150.1, 2. + 10. / 60. + 55 / 3600.,
                                       sequence='u', nvis=[7],
                                       ha_limits=([0., .5], [23.5, 24.]),
                                       survey_name='DD:u,COSMOS', reward_value=100,
                                       moon_up=False, fraction_limit=0.0015,
                                       nside=nside))
scheduler = fs.Core_scheduler(surveys, nside=nside)  # Required
observatory = Speed_observatory(nside=nside, mjd_start=59853.)
observatory, scheduler, observations = fs.sim_runner(observatory, scheduler,
                                                     survey_length=survey_length,
                                                     filename='from_repo_%iyrs.db' % years,
                                                     delete_past=True)