def testVisit_repeat_basis_function(self):
    """Check Visit_repeat_basis_function rewards a pair at the right time."""
    repeat_bf = basis_functions.Visit_repeat_basis_function()
    healpix = np.array([1000])
    # Step forward in half-hour (30 minute) increments, in days.
    half_hour = 30. / 60. / 24.

    observation = empty_observation()
    observation['filter'] = 'r'
    observation['mjd'] = 59000.

    conditions = Conditions()
    conditions.mjd = np.max(observation['mjd'])

    # Right after the first observation there is no reward yet.
    repeat_bf.add_observation(observation, indx=healpix)
    self.assertEqual(np.max(repeat_bf(conditions)), 0.)

    # Once enough time has passed, the basis function wants a pair.
    conditions.mjd += half_hour
    self.assertEqual(np.max(repeat_bf(conditions)), 1.)

    # Completing the pair drops the reward back to zero.
    repeat_bf.add_observation(observation, indx=healpix)
    conditions.mjd += half_hour
    self.assertEqual(np.max(repeat_bf(conditions)), 0.)
def add_observation(self, observation, indx=None, **kwargs): """Add an observed observation """ # self.ignore_obs not in str(observation['note']) to_ignore = np.any( [ignore in str(observation['note']) for ignore in self.ignore_obs]) log.debug('[Pairs.add_observation]: %s: %s: %s', to_ignore, str(observation['note']), self.ignore_obs) log.debug('[Pairs.add_observation.queue]: %s', self.observing_queue) if not to_ignore: # Update my extra features: for feature in self.extra_features: if hasattr(self.extra_features[feature], 'add_observation'): self.extra_features[feature].add_observation(observation, indx=indx) self.reward_checked = False # Check if this observation needs a pair # XXX--only supporting single pairs now. Just start up another scripted survey # to grab triples, etc? Or add two observations to queue at a time? # keys_to_copy = ['RA', 'dec', 'filter', 'exptime', 'nexp'] if ((observation['filter'][0] in self.filt_to_pair) and (np.max(self.extra_features['Pair_map'].feature[indx]) < 1)): obs_to_queue = empty_observation() for key in observation.dtype.names: obs_to_queue[key] = observation[key] # Fill in the ideal time we would like this observed log.debug('Observation MJD: %.4f (dt=%.4f)', obs_to_queue['mjd'], self.dt) obs_to_queue['mjd'] += self.dt self.observing_queue.append(obs_to_queue) log.debug('[Pairs.add_observation.queue.size]: %i', len(self.observing_queue)) for obs in self.observing_queue: log.debug('[Pairs.add_observation.queue]: %s', obs)
def basic_sequence(ra, dec, survey_name='', sequence='urgizy',
                   nvis=(8, 20, 10, 20, 26, 20),
                   exptime=30., u_exptime=30., nexp=1):
    """Generate the list of observations that should happen in a ddf sequence

    Parameters
    ----------
    ra, dec : float
        Field center, in degrees (converted to radians here).
    survey_name : str
        Stored in each observation's 'note' field.
    sequence : str
        Filter names, one character per filter, paired positionally with nvis.
    nvis : sequence of int
        Number of visits to generate per filter. The default was a mutable
        list; a tuple avoids the shared-mutable-default pitfall and is
        backward compatible (it is only iterated via zip).
    exptime : float
        Exposure time for every filter except u.
    u_exptime : float
        Exposure time for the u filter.
    nexp : int
        Number of exposures per visit.

    Returns
    -------
    np.array
        Array of observation records.
    """
    observations = []
    for num, filtername in zip(nvis, sequence):
        for j in range(num):
            obs = empty_observation()
            obs['filter'] = filtername
            # u-band gets its own exposure time.
            if filtername == 'u':
                obs['exptime'] = u_exptime
            else:
                obs['exptime'] = exptime
            obs['RA'] = np.radians(ra)
            obs['dec'] = np.radians(dec)
            obs['nexp'] = nexp
            obs['note'] = survey_name
            observations.append(obs)
    return np.array(observations)
def basic_sequence(ra, dec, survey_name='', sequence='urgizy',
                   nvis=(8, 20, 10, 20, 26, 20),
                   exptime=30., u_exptime=30., nexp=2, u_nexp=2):
    """Generate the list of observations that should happen in a ddf sequence

    Parameters
    ----------
    ra, dec : float
        Field center, in degrees (converted to radians here).
    survey_name : str
        Stored in each observation's 'note' field.
    sequence : str
        Filter names, one character per filter, paired positionally with nvis.
    nvis : sequence of int
        Number of visits to generate per filter. The default was a mutable
        list; a tuple avoids the shared-mutable-default pitfall and is
        backward compatible (it is only iterated via zip).
    exptime, u_exptime : float
        Exposure times for non-u filters and for u respectively.
    nexp, u_nexp : int
        Exposures per visit for non-u filters and for u respectively.

    Returns
    -------
    np.array
        Array of observation records.
    """
    observations = []
    for num, filtername in zip(nvis, sequence):
        # XXX--in theory, we could use decimal nvis and do a random number draw here, so
        # nvis=2.5 means 2 half the time and 3 half the time.
        for j in range(num):
            obs = empty_observation()
            obs['filter'] = filtername
            # u-band gets its own exposure time and exposure count.
            if filtername == 'u':
                obs['exptime'] = u_exptime
                obs['nexp'] = u_nexp
            else:
                obs['exptime'] = exptime
                obs['nexp'] = nexp
            obs['RA'] = np.radians(ra)
            obs['dec'] = np.radians(dec)
            obs['note'] = survey_name
            observations.append(obs)
    return np.array(observations)
def _slice2obs(self, obs_row):
    """Copy a table row into a full observation object."""
    keys_to_copy = ('RA', 'dec', 'filter', 'exptime', 'nexp',
                    'note', 'field_id')
    new_obs = empty_observation()
    for key in keys_to_copy:
        new_obs[key] = obs_row[key]
    return new_obs
def generate_observations_rough(self, conditions):
    """
    Returns
    -------
    one of:
        1) None
        2) A list of observations
    """
    # Recompute the reward only if it is stale.
    if not self.reward_checked:
        self.reward = self.calc_reward_function(conditions)
    return [empty_observation()]
def __init__(self, basis_functions, RA, dec, sequence='rgizy',
             nvis=(20, 10, 20, 26, 20),
             exptime=30., u_exptime=30., nexp=2, ignore_obs=None, survey_name='DD',
             reward_value=None, readtime=2., filter_change_time=120.,
             nside=None, flush_pad=30., seed=42, detailers=None):
    """Deep drilling survey constructor.

    Parameters
    ----------
    basis_functions : list
        Passed through to the parent survey class.
    RA, dec : float
        Field center in degrees (converted to radians here).
    sequence : str or list
        If a string, one character per filter and observations are
        generated here; otherwise it is taken to be pre-built
        observation arrays.
    nvis : sequence of int
        Visits per filter, paired positionally with `sequence` when
        `sequence` is a string. Changed from a mutable list default to a
        tuple (backward compatible; it is only iterated).
    exptime, u_exptime : float
        Exposure times (seconds) for non-u filters and for u.
    nexp : int
        Exposures per visit.
    reward_value : float or None
        If None, N_obs_survey extra features are attached instead.
    readtime, filter_change_time : float
        Overheads (seconds) used for the sequence-duration estimate.
    flush_pad : float
        Minutes; converted to days.
    seed : int
        Seeds the random module.
    """
    super(Deep_drilling_survey, self).__init__(nside=nside, basis_functions=basis_functions,
                                               detailers=detailers, ignore_obs=ignore_obs)
    random.seed(a=seed)

    self.ra = np.radians(RA)
    self.ra_hours = RA / 360. * 24.
    self.dec = np.radians(dec)
    self.survey_name = survey_name
    self.reward_value = reward_value
    self.flush_pad = flush_pad / 60. / 24.  # To days
    self.filter_sequence = []
    # isinstance rather than type(...) == str: same behavior for plain
    # strings, and also accepts str subclasses.
    if isinstance(sequence, str):
        self.observations = []
        for num, filtername in zip(nvis, sequence):
            for j in range(num):
                obs = empty_observation()
                obs['filter'] = filtername
                if filtername == 'u':
                    obs['exptime'] = u_exptime
                else:
                    obs['exptime'] = exptime
                obs['RA'] = self.ra
                obs['dec'] = self.dec
                obs['nexp'] = nexp
                obs['note'] = survey_name
                self.observations.append(obs)
    else:
        self.observations = sequence

    # Let's just make this an array for ease of use
    self.observations = np.concatenate(self.observations)
    # Sort by filter so filter changes within a sequence are minimized.
    order = np.argsort(self.observations['filter'])
    self.observations = self.observations[order]
    n_filter_change = np.size(np.unique(self.observations['filter']))

    # Make an estimate of how long a seqeunce will take. Assumes no major rotational or spatial
    # dithering slowing things down.
    self.approx_time = np.sum(self.observations['exptime'] +
                              readtime * self.observations['nexp']) / 3600. / 24. \
        + filter_change_time * n_filter_change / 3600. / 24.  # to days

    if self.reward_value is None:
        self.extra_features['Ntot'] = features.N_obs_survey()
        self.extra_features['N_survey'] = features.N_obs_survey(note=self.survey_name)
def generate_observations_rough(self, conditions):
    """
    Just point at the highest reward healpix

    Returns
    -------
    list of observations (at most one), drawn from the best-reward field.
    """
    self.reward = self.calc_reward_function(conditions)

    # Check if we need to spin the tesselation
    if self.dither & (conditions.night != self.night):
        self._spin_fields()
        self.night = conditions.night.copy()

    # Let's find the best N from the fields
    order = np.argsort(self.reward)[::-1]
    # Crop off any NaNs
    order = order[~np.isnan(self.reward[order])]

    # Renamed from `iter`, which shadowed the builtin.
    block_num = 0
    while True:
        # Take the next block of best healpixels and map them to fields.
        best_hp = order[block_num * self.block_size:(block_num + 1) * self.block_size]
        best_fields = np.unique(self.hp2fields[best_hp])
        observations = []
        for field in best_fields:
            obs = empty_observation()
            obs['RA'] = self.fields['RA'][field]
            obs['dec'] = self.fields['dec'][field]
            obs['rotSkyPos'] = 0.
            obs['filter'] = self.filtername
            obs['nexp'] = self.nexp
            obs['exptime'] = self.exptime
            obs['field_id'] = -1
            obs['note'] = self.survey_name
            observations.append(obs)
            # Only the single best field is returned per attempt.
            break
        block_num += 1
        # Stop once we have an observation, or when the next blocks would
        # run past the end of the sorted healpixels.
        if len(observations) > 0 or (block_num + 2) * self.block_size > len(order):
            break
    return observations
def testPair_in_night(self):
    """Check that the Pair_in_night feature counts pairs as expected."""
    pair_feature = features.Pair_in_night(gap_min=25., gap_max=45.)
    # The feature starts out empty.
    self.assertEqual(np.max(pair_feature.feature), 0.)

    healpix = np.array([1000])
    half_hour = 30. / 60. / 24.

    observation = empty_observation()
    observation['filter'] = 'r'
    observation['mjd'] = 59000.

    # A lone observation does not make a pair.
    pair_feature.add_observation(observation, indx=healpix)
    self.assertEqual(np.max(pair_feature.feature), 0.)

    # A second observation within the gap window completes one pair.
    observation['mjd'] += half_hour
    pair_feature.add_observation(observation, indx=healpix)
    self.assertEqual(np.max(pair_feature.feature), 1.)

    # A third observation makes a second pair.
    observation['mjd'] += half_hour
    pair_feature.add_observation(observation, indx=healpix)
    self.assertEqual(np.max(pair_feature.feature), 2.)
def generate_dd_surveys(nside=None, nexp=2, detailers=None, reward_value=100,
                        frac_total=0.0185/2., aggressive_frac=0.011/2.,
                        exptime=30, u_exptime=30,
                        nvis_master=[8, 20, 10, 20, 26, 20],
                        delays=[0., 0.5, 1.5]):
    """Utility to return a list of standard deep drilling field surveys.

    XXX-Someone double check that I got the coordinates right!

    NOTE(review): nvis_master and delays use mutable list defaults; they
    appear to be read-only here, but consider tuples.
    """
    surveys = []

    # ELAIS S1
    RA = 9.45
    dec = -44.
    survey_name = 'DD:ELAISS1'
    ha_limits = ([0., 1.5], [21.5, 24.])
    bfs = dd_bfs(RA, dec, survey_name, ha_limits, frac_total=frac_total,
                 aggressive_frac=aggressive_frac, delays=delays)
    surveys.append(Deep_drilling_survey(bfs, RA, dec, sequence='urgizy',
                                        nvis=nvis_master,
                                        exptime=exptime, u_exptime=u_exptime,
                                        survey_name=survey_name, reward_value=reward_value,
                                        nside=nside, nexp=nexp, detailers=detailers))

    # XMM-LSS (basis functions are built but the survey append is disabled)
    survey_name = 'DD:XMM-LSS'
    RA = 35.708333
    dec = -4-45/60.
    ha_limits = ([0., 1.5], [21.5, 24.])
    bfs = dd_bfs(RA, dec, survey_name, ha_limits, frac_total=frac_total,
                 aggressive_frac=aggressive_frac, delays=delays)
    #surveys.append(Deep_drilling_survey(bfs, RA, dec, sequence='urgizy', exptime=exptime, u_exptime=u_exptime,
    #                                    nvis=nvis_master, survey_name=survey_name, reward_value=reward_value,
    #                                    nside=nside, nexp=nexp, detailers=detailers))

    # Extended Chandra Deep Field South
    RA = 53.125
    dec = -28.-6/60.
    survey_name = 'DD:ECDFS'
    ha_limits = [[0.5, 3.0], [20., 22.5]]
    bfs = dd_bfs(RA, dec, survey_name, ha_limits, frac_total=frac_total,
                 aggressive_frac=aggressive_frac, delays=delays)
    surveys.append(Deep_drilling_survey(bfs, RA, dec, sequence='urgizy', nvis=nvis_master,
                                        exptime=exptime, u_exptime=u_exptime,
                                        survey_name=survey_name, reward_value=reward_value, nside=nside,
                                        nexp=nexp, detailers=detailers))

    # COSMOS (basis functions are built but the survey append is disabled)
    RA = 150.1
    dec = 2.+10./60.+55/3600.
    survey_name = 'DD:COSMOS'
    ha_limits = ([0., 2.5], [21.5, 24.])
    bfs = dd_bfs(RA, dec, survey_name, ha_limits, frac_total=frac_total,
                 aggressive_frac=aggressive_frac, delays=delays)
    #surveys.append(Deep_drilling_survey(bfs, RA, dec, sequence='urgizy',
    #                                    nvis=nvis_master, exptime=exptime, u_exptime=u_exptime,
    #                                    survey_name=survey_name, reward_value=reward_value, nside=nside,
    #                                    nexp=nexp, detailers=detailers))

    # Euclid Fields
    # I can use the sequence kwarg to do two positions per sequence
    filters = 'urgizy'
    nviss = nvis_master
    survey_name = 'DD:EDFS'
    # Note the sequences need to be in radians since they are using observation objects directly
    RAs = np.radians([58.97, 63.6])
    decs = np.radians([-49.28, -47.60])
    sequence = []
    for filtername, nvis in zip(filters, nviss):
        for ra, dec in zip(RAs, decs):
            for num in range(nvis):
                obs = empty_observation()
                obs['filter'] = filtername
                if filtername == 'u':
                    obs['exptime'] = u_exptime
                else:
                    obs['exptime'] = exptime
                obs['RA'] = ra
                obs['dec'] = dec
                obs['nexp'] = nexp
                obs['note'] = survey_name
                sequence.append(obs)

    ha_limits = ([0., 1.5], [22.5, 24.])
    # And back to degrees for the basis function
    bfs = dd_bfs(np.degrees(RAs[0]), np.degrees(decs[0]), survey_name, ha_limits,
                 frac_total=frac_total, aggressive_frac=aggressive_frac, delays=delays)
    # NOTE(review): RA still holds the COSMOS value and dec was clobbered by
    # the loop above; the pointings come from `sequence`, but confirm the
    # RA/dec passed here are intentional.
    surveys.append(Deep_drilling_survey(bfs, RA, dec, sequence=sequence,
                                        survey_name=survey_name, reward_value=reward_value,
                                        nside=nside, nexp=nexp, detailers=detailers))

    return surveys
def generate_observations_rough(self, conditions):
    """
    Find a good block of observations.

    Selects the best fields (blob-grown or simply sorted by reward),
    projects their alt/az positions to a plane, orders them with a TSP
    solver, and returns the resulting list of observations.
    """
    self.reward = self.calc_reward_function(conditions)

    # Check if we need to spin the tesselation
    if self.dither & (conditions.night != self.night):
        self._spin_fields()
        self.night = conditions.night.copy()

    if self.grow_blob:
        # Note, returns highest first
        ordered_hp = hp_grow_argsort(self.reward)
        ordered_fields = self.hp2fields[ordered_hp]
        orig_order = np.arange(ordered_fields.size)
        # Remove duplicate field pointings
        _u_of, u_indx = np.unique(ordered_fields, return_index=True)
        new_order = np.argsort(orig_order[u_indx])
        best_fields = ordered_fields[u_indx[new_order]]

        if np.size(best_fields) < self.nvisit_block:
            # Let's fall back to the simple sort
            self.simple_order_sort()
        else:
            self.best_fields = best_fields[0:self.nvisit_block]
    else:
        self.simple_order_sort()

    if len(self.best_fields) == 0:
        # everything was nans, or self.nvisit_block was zero
        return []

    # Let's find the alt, az coords of the points (right now, hopefully doesn't change much in time block)
    pointing_alt, pointing_az = _approx_RaDec2AltAz(
        self.fields['RA'][self.best_fields],
        self.fields['dec'][self.best_fields],
        conditions.site.latitude_rad, conditions.site.longitude_rad,
        conditions.mjd, lmst=conditions.lmst)

    # Let's find a good spot to project the points to a plane
    mid_alt = (np.max(pointing_alt) - np.min(pointing_alt)) / 2.

    # Code snippet from MAF for computing mean of angle accounting for wrap around
    # XXX-TODO: Maybe move this to sims_utils as a generally useful snippet.
    x = np.cos(pointing_az)
    y = np.sin(pointing_az)
    meanx = np.mean(x)
    meany = np.mean(y)
    angle = np.arctan2(meany, meanx)
    radius = np.sqrt(meanx**2 + meany**2)
    mid_az = angle % (2. * np.pi)
    if radius < 0.1:
        # Azimuths are spread nearly uniformly; pick an arbitrary midpoint.
        mid_az = np.pi

    # Project the alt,az coordinates to a plane. Could consider scaling things to represent
    # time between points rather than angular distance.
    pointing_x, pointing_y = gnomonic_project_toxy(pointing_az, pointing_alt, mid_az, mid_alt)
    # Round off positions so that we ensure identical cross-platform performance
    scale = 1e6
    pointing_x = np.round(pointing_x * scale).astype(int)
    pointing_y = np.round(pointing_y * scale).astype(int)
    # Now I have a bunch of x,y pointings. Drop into TSP solver to get an efficient route
    towns = np.vstack((pointing_x, pointing_y)).T
    # Leaving optimize=False for speed. The optimization step doesn't usually improve much.
    better_order = tsp_convex(towns, optimize=False)
    # XXX-TODO: Could try to roll better_order to start at the nearest/fastest slew from current position.
    observations = []
    counter2 = 0
    # Estimate when the whole block finishes (seconds), then convert to an
    # MJD flush time shared by every observation in the block.
    approx_end_time = np.size(better_order) * (
        self.slew_approx + self.exptime +
        self.read_approx * (self.nexp - 1))
    flush_time = conditions.mjd + approx_end_time / 3600. / 24. + self.flush_time
    for i, indx in enumerate(better_order):
        field = self.best_fields[indx]
        obs = empty_observation()
        obs['RA'] = self.fields['RA'][field]
        obs['dec'] = self.fields['dec'][field]
        obs['rotSkyPos'] = 0.
        obs['filter'] = self.filtername1
        if self.nexp_dict is None:
            obs['nexp'] = self.nexp
        else:
            obs['nexp'] = self.nexp_dict[self.filtername1]
        obs['exptime'] = self.exptime
        obs['field_id'] = -1
        obs['note'] = '%s' % (self.survey_note)
        obs['block_id'] = self.counter
        obs['flush_by_mjd'] = flush_time
        # Add the mjd for debugging
        # obs['mjd'] = conditions.mjd  # XXX temp debugging line
        obs['survey_id'] = i
        observations.append(obs)
        counter2 += 1

    result = observations
    return result
def ddf_info(exptime=30., u_exptime=30., nexp=1):
    """
    Basic info for each DDF

    Returns
    -------
    ddf_dict : dict
        Maps survey name to its array of observation records.
    ha_limits_dict : dict
        Maps survey name to its hour-angle limit windows.
    """
    ddf_dict = {}
    ha_limits_dict = {}

    # ELAIS S1
    RA = 9.45
    dec = -44.
    ddf_dict['DD:ELAISS1'] = basic_sequence(RA, dec, 'DD:ELAISS1', exptime=exptime,
                                            u_exptime=u_exptime, nexp=nexp)
    ha_limits_dict['DD:ELAISS1'] = ([0., 1.5], [21.5, 24.])

    # XMM-LSS
    RA = 35.708333
    dec = -4 - 45 / 60.
    ddf_dict['DD:XMM-LSS'] = basic_sequence(RA, dec, 'DD:XMM-LSS', exptime=exptime,
                                            u_exptime=u_exptime, nexp=nexp)
    ha_limits_dict['DD:XMM-LSS'] = ([0., 1.5], [21.5, 24.])

    # Extended Chandra Deep Field South
    RA = 53.125
    dec = -28. - 6 / 60.
    ddf_dict['DD:ECDFS'] = basic_sequence(RA, dec, 'DD:ECDFS', exptime=exptime,
                                          u_exptime=u_exptime, nexp=nexp)
    ha_limits_dict['DD:ECDFS'] = ([0.5, 3.0], [20., 22.5])

    # COSMOS
    RA = 150.1
    dec = 2. + 10. / 60. + 55 / 3600.
    ddf_dict['DD:COSMOS'] = basic_sequence(RA, dec, 'DD:COSMOS', exptime=exptime,
                                           u_exptime=u_exptime, nexp=nexp)
    ha_limits_dict['DD:COSMOS'] = ([0., 2.5], [21.5, 24.])

    # Euclid Fields
    # I can use the sequence kwarg to do two positions per sequence
    filters = 'urgizy'
    nviss = [8, 20, 10, 20, 26, 20]
    survey_name = 'DD:EDFS'
    # Note the sequences need to be in radians since they are using observation objects directly
    RAs = np.radians([58.97, 63.6])
    decs = np.radians([-49.28, -47.60])
    sequence = []
    # Interleave visits to the two Euclid pointings within each filter block.
    for filtername, nvis in zip(filters, nviss):
        for ra, dec in zip(RAs, decs):
            for num in range(nvis):
                obs = empty_observation()
                obs['filter'] = filtername
                # u-band gets its own exposure time.
                if filtername == 'u':
                    obs['exptime'] = u_exptime
                else:
                    obs['exptime'] = exptime
                obs['RA'] = ra
                obs['dec'] = dec
                obs['nexp'] = nexp
                obs['note'] = survey_name
                sequence.append(obs)
    ddf_dict[survey_name] = np.array(sequence)
    ha_limits_dict[survey_name] = ([0., 1.5], [22.5, 24.])

    return ddf_dict, ha_limits_dict
def __init__(self, sequence_ids=''):
    """Track the most recent sequence observation.

    Parameters
    ----------
    sequence_ids : str
        The ids of all sequence observations.
    """
    # Seed the feature with a blank observation record.
    self.feature = utils.empty_observation()
    # Which observation ids count as sequence observations.
    self.sequence_ids = sequence_ids
def __init__(self, survey_name=None):
    """Track the most recent observation, optionally for one survey.

    Parameters
    ----------
    survey_name : str, optional
        Name of the survey this feature is restricted to.
    """
    # Seed the feature with a blank observation record.
    self.feature = utils.empty_observation()
    self.survey_name = survey_name
def __init__(self, basis_functions, RA, dec, sequences=None,
             exptime=30., nexp=1, ignore_obs=None, survey_name='DD_DESC',
             reward_value=101., readtime=2., filter_change_time=120.,
             nside=None, flush_pad=30., seed=42, detailers=None):
    """DESC-style deep drilling survey constructor.

    Parameters
    ----------
    basis_functions : list
        Passed through to the parent survey class.
    RA, dec : float
        Field center in degrees (converted to radians here).
    sequences : list of dict, optional
        Each dict maps filtername -> number of visits; a None entry
        means "no sequence" for that slot. A default pair of sequences
        plus a None placeholder is used when not supplied.
    exptime : float
        Exposure time per visit (seconds).
    nexp : int
        Exposures per visit.
    survey_name : str
        Stored in each observation's 'note' field.
    readtime, filter_change_time : float
        Overheads (seconds) used for the sequence-duration estimate.
    flush_pad : float
        Minutes; converted to days below.
    seed : int
        NOTE(review): not used anywhere in this constructor -- confirm
        whether it is consumed elsewhere or is vestigial.
    """
    super(DESC_ddf, self).__init__(nside=nside, basis_functions=basis_functions,
                                   detailers=detailers, ignore_obs=ignore_obs)

    self.ra = np.radians(RA)
    self.ra_hours = RA / 360. * 24.
    self.dec = np.radians(dec)
    self.survey_name = survey_name
    self.reward_value = reward_value
    self.flush_pad = flush_pad / 60. / 24.  # To days

    # Template observation reused for simple (non-sequence) visits.
    self.simple_obs = empty_observation()
    self.simple_obs['RA'] = np.radians(RA)
    self.simple_obs['dec'] = np.radians(dec)
    self.simple_obs['exptime'] = exptime
    self.simple_obs['nexp'] = nexp
    self.simple_obs['note'] = survey_name

    # Define the sequences we would like to do
    if sequences is None:
        self.sequences = [{'u': 2, 'g': 2, 'r': 4, 'i': 8},
                          {'z': 25, 'y': 4},
                          None]
    else:
        self.sequences = sequences

    # Estimate how long each sequence takes, in days: one filter change
    # per filter, plus exposure and read overheads per visit.
    self.approx_times = []
    for sequence in self.sequences:
        if sequence is None:
            self.approx_times.append(0)
        else:
            n_exp_in_seq = np.sum(list(sequence.values()))
            time_needed = filter_change_time * len(sequence.keys())
            time_needed += exptime * n_exp_in_seq
            time_needed += readtime * n_exp_in_seq * nexp
            self.approx_times.append(time_needed / 3600. / 24.)

    # Track what we last tried to do
    # XXX-this should probably go into self.extra_features or something for consistency.
    self.sequence_index = 0
    self.last_night_observed = -100