def read_pointings(pointings_file):
    """Read a file of telescope pointings.

    Each line is expected to hold three whitespace-separated columns:
    RA, Dec and a pointing name.  Coordinates may be given either in
    sexigesimal format (containing ':') or in decimal degrees.

    :param str pointings_file: path to the pointings file.
    :return: dict mapping pointing name -> (ra, dec).  Sexigesimal input
        is converted to decimal degrees by utilities.sex2decdeg;
        decimal-degree input is returned as the strings read from file
        (unconverted, preserving existing caller expectations).
    """
    if not path.isfile(pointings_file):
        print('ERROR: Cannot find file ' + pointings_file)
        exit()

    pointings = {}
    # Use a context manager so the file handle is always closed:
    with open(pointings_file, 'r') as f:
        for line in f:
            entries = line.split()
            # Skip blank or malformed lines rather than crashing on unpack:
            if len(entries) != 3:
                continue
            (ra, dec, name) = entries
            if ':' in str(ra):
                (ra, dec) = utilities.sex2decdeg(ra, dec)
            pointings[name] = (ra, dec)

    return pointings
def load_xsuperstamp_targets( self, config ):
    """Load the data for targets selected outside the superstamp.

    Reads the JSON file named by config['xsuperstamp_target_data'] and
    populates self.xsuperstamp_targets with one K2C9Event per entry of
    the file's 'targets' dict.  Sexigesimal coordinates from the file
    are converted to decimal degrees.

    :param dict config: configuration with key 'xsuperstamp_target_data'.
    """
    # Context manager ensures the file handle is closed; json.load reads
    # directly from the open file object:
    with open(config['xsuperstamp_target_data'], 'r') as f:
        json_data = json.load(f)

    for target, target_data in json_data['targets'].items():
        event = event_classes.K2C9Event()
        (ra_deg, dec_deg) = utilities.sex2decdeg(target_data['RA'],
                                                 target_data['Dec'])
        event.ogle_ra = ra_deg
        event.ogle_dec = dec_deg
        event.ogle_i0 = float(target_data['Io'])
        # By construction these targets lie outside the superstamp but
        # inside the campaign footprint and time window:
        event.in_superstamp = False
        event.in_footprint = True
        event.during_campaign = True
        self.xsuperstamp_targets[target] = event
def calc_footprint( self, pointing ):
    """Calculate the corners of the footprint outline for a pointing.

    :param tuple pointing: (ra_centre, dec_centre); coordinates can be
        given in sexigesimal or decimal-degree format.
    :return: 5x2 np.array of (RA, Dec) corners in decimal degrees.  The
        first corner is repeated at the end so the outline is closed for
        plotting.
    """
    if ':' in str(pointing[0]):
        # Bug fix: sex2decdeg takes (ra, dec) as two separate arguments,
        # as at every other call site; the original passed the whole
        # tuple as a single argument.
        (ra_cen, dec_cen) = utilities.sex2decdeg(pointing[0], pointing[1])
    else:
        (ra_cen, dec_cen) = pointing

    # Four corners of the field of view, then the first corner repeated
    # to close the outline:
    corners = [
        [(ra_cen + self.ra_dfov), (dec_cen + self.dec_dfov)],
        [(ra_cen - self.ra_dfov), (dec_cen + self.dec_dfov)],
        [(ra_cen - self.ra_dfov), (dec_cen - self.dec_dfov)],
        [(ra_cen + self.ra_dfov), (dec_cen - self.dec_dfov)],
        [(ra_cen + self.ra_dfov), (dec_cen + self.dec_dfov)],
    ]
    return np.array(corners)
def read_moa_param_files( config ):
    """Read the locally-stored MOA survey data products.

    Parses the last-changed timestamp file, the moa_lenses parameter file
    and the last-updated file, and returns the results as a populated
    SurveyData object.

    :param dict config: configuration with the moa_* / ogle_* location
        and filename keys used below.
    :return: survey_classes.SurveyData with last_changed, lenses (dict of
        event ID -> Lens) and last_updated populated.
    """
    ts_file_path = path.join(config['moa_data_local_location'],
                             config['moa_time_stamp_file'])
    par_file_path = path.join(config['moa_data_local_location'],
                              config['moa_lenses_file'])
    # NOTE(review): the updated-file path deliberately(?) uses the OGLE
    # data directory key — confirm this is intended.
    updated_file_path = path.join(config['ogle_data_local_location'],
                                  config['moa_updated_file'])

    moa_data = survey_classes.SurveyData()

    # Parse the timestamp in the last.changed file:
    moa_data.last_changed = time_stamp_file(ts_file_path,
                                            "%Y-%m-%dT%H:%M:%S")

    # Parse the moa_lenses parameter file, skipping blank and comment lines:
    moa_data.lenses = {}
    with open(par_file_path, 'r') as par_file:
        file_lines = par_file.readlines()
    for line in file_lines:
        if len(line.strip()) == 0 or line.lstrip().startswith('#'):
            continue
        (event_id, field, ra, dec, t0_hjd, tE, u0, A0, I0, c) = line.split()
        # Coordinates may be sexigesimal or already in decimal degrees:
        if ':' in ra or ':' in dec:
            (ra_deg, dec_deg) = utilities.sex2decdeg(ra, dec)
        else:
            ra_deg = float(ra)
            dec_deg = float(dec)
        event = event_classes.Lens()
        event.set_par('name', event_id)
        event.set_par('survey_id', field)
        event.set_par('ra', ra_deg)
        event.set_par('dec', dec_deg)
        event.set_par('t0', t0_hjd)
        event.set_par('te', tE)
        event.set_par('u0', u0)
        event.set_par('a0', A0)
        event.set_par('i0', I0)
        event.set_par('classification', c)
        event.origin = 'MOA'
        moa_data.lenses[event_id] = event

    moa_data.last_updated = read_update_file(updated_file_path)

    return moa_data
def get_kmtnet_parameters(config):
    '''Retrieve all available information on KMTNet-detected events from
    the HTML table at:
    http://astroph.chungbuk.ac.kr/~kmtnet/<year>.html

    :param dict config: script configuration (used for verbose output).
    :return: (last_update, lens_params) where last_update is a datetime
        and lens_params maps event ID -> event_classes.Lens.
    '''
    verbose(config, 'Syncing data from KMTNet')

    # Download the website indexing KMTNet-discovered events for the
    # current year.  (Simplified from Time.now()...value.year to a plain
    # UTC datetime; the resulting year string is identical.)
    year_string = str(datetime.utcnow().year)
    url = 'http://astroph.chungbuk.ac.kr/~kmtnet/' + year_string + '.html'
    events_index_data = utilities.get_secure_url(url, (None, None))
    events_index_data = events_index_data.split('\n')

    # Parse the index of events.  KMTNet don't provide very much
    # information on their discoveries yet: each event occupies an ID
    # line containing 'KMT' followed by one line of parameters.
    lens_params = {}
    i = 0
    while i < len(events_index_data):
        entry = events_index_data[i]
        if 'KMT' in entry:
            entry = [entry] + events_index_data[i + 1].split()
            event_id = entry[0].replace(' ', '')
            (ra_deg, dec_deg) = utilities.sex2decdeg(entry[1], entry[2])
            event = event_classes.Lens()
            event.set_par('name', event_id)
            event.set_par('ra', ra_deg)
            event.set_par('dec', dec_deg)
            event.origin = 'KMTNET'
            lens_params[event_id] = event
            i = i + 2
        else:
            i = i + 1

    # KMTNet do not provide a last-updated timestamp anywhere, so setting
    # this to the current time for now:
    last_update = datetime.utcnow()
    # (Fixed typo in the log message: 'udpated' -> 'updated'.)
    verbose(config, '--> Last updated at: ' +
            last_update.strftime("%Y-%m-%dT%H:%M:%S"))
    verbose(config, '--> Downloaded index of ' + str(len(lens_params)) +
            ' events')

    return last_update, lens_params
def read_ogle_param_files( config ):
    """Read the locally-stored OGLE survey data products.

    Parses the last.changed timestamp file, every lenses parameter file
    matching config['ogle_lenses_file'] + '.*', and the last-updated
    file, returning the results as a populated SurveyData object.

    :param dict config: configuration with the ogle_* location and
        filename keys used below.
    :return: survey_classes.SurveyData with last_changed, lenses (dict of
        event ID -> Lens) and last_updated populated.
    """
    ts_file_path = path.join(config['ogle_data_local_location'],
                             config['ogle_time_stamp_file'])
    par_file_path = path.join(config['ogle_data_local_location'],
                              config['ogle_lenses_file'] + '.*')
    lens_file_list = glob.glob(par_file_path)
    updated_file_path = path.join(config['ogle_data_local_location'],
                                  config['ogle_updated_file'])

    ogle_data = survey_classes.SurveyData()

    # Parse the timestamp in the last.changed file.  The timestamp is
    # given in yyyymmdd.daydecimal format:
    ogle_data.last_changed = time_stamp_file(ts_file_path, "%Y%m%dTD")

    # Parse the lenses parameter files.
    # First 2 lines of each are header, so skipped:
    ogle_data.lenses = {}
    for par_file in lens_file_list:
        with open(par_file, 'r') as f:
            file_lines = f.readlines()
        for line in file_lines[2:]:
            (event_id, field, star, ra, dec, t0_hjd, t0_utc, tE, u0, A0,
             dmag, fbl, I_bl, I0) = line.split()
            # Normalize the event ID to carry the survey prefix:
            if 'OGLE' not in event_id:
                event_id = 'OGLE-' + event_id
            (ra_deg, dec_deg) = utilities.sex2decdeg(ra, dec)
            event = event_classes.Lens()
            event.set_par('name', event_id)
            event.set_par('survey_id', field)
            event.set_par('ra', ra_deg)
            event.set_par('dec', dec_deg)
            event.set_par('t0', t0_hjd)
            event.set_par('te', tE)
            event.set_par('u0', u0)
            event.set_par('a0', A0)
            event.set_par('i0', I0)
            event.origin = 'OGLE'
            ogle_data.lenses[event_id] = event

    ogle_data.last_updated = read_update_file(updated_file_path)

    return ogle_data
def build_odin_request(self, config, log=None, debug=False):
    """Build a JSON-formatted ODIN compound observation request.

    Composes the proposal, location, target, constraints and molecule
    dictionaries, then generates a sequence of observation requests
    starting ~10 minutes from now and repeating at self.cadence [days]
    spacing until self.ttl [days] have elapsed.  The JSON-encoded
    compound request is stored in self.json_request; self.ts_submit,
    self.ts_expire and self.instrument_class are also set.

    :param dict config: requires keys 'proposal_id', 'user_id' and
        'request_window' (window length in hours).
    :param logging.Logger log: optional logger for debug output.
    :param bool debug: when True (and log is given), log each stage.
    """

    def log_debug(message):
        # Emit debug output only when requested and a logger is available
        # (replaces the repeated 'debug == True and log != None' tests).
        if debug and log is not None:
            log.info(message)

    proposal = {
        'proposal_id': config['proposal_id'],
        'user_id': config['user_id'],
    }
    log_debug('Building ODIN observation request')
    log_debug('Proposal dictionary: ' + str(proposal))

    location = {
        # NOTE(review): stripping 'a' maps e.g. '1m0a' -> '1m0'; confirm
        # this is the intended telescope_class transformation.
        'telescope_class': str(self.tel).replace('a', ''),
        'site': str(self.site),
        'observatory': str(self.observatory),
    }
    log_debug('Location dictionary: ' + str(location))

    (ra_deg, dec_deg) = utilities.sex2decdeg(self.ra, self.dec)
    target = {
        'name': str(self.name),
        'ra': ra_deg,
        'dec': dec_deg,
        'proper_motion_ra': 0,
        'proper_motion_dec': 0,
        'parallax': 0,
        'epoch': 2000,
    }
    log_debug('Target dictionary: ' + str(target))

    constraints = {'max_airmass': 2.0}
    log_debug('Constraints dictionary: ' + str(constraints))

    imager = instruments.Instrument(self.tel, self.instrument)
    self.instrument_class = imager.instrument_class
    log_debug('Instrument overheads ' + imager.summary())

    self.get_group_id()

    ur = {
        'group_id': self.group_id,
        'operator': 'many',
    }

    reqList = []
    # Start the first window ~10 minutes in the future to allow for
    # submission latency; expire the whole group after self.ttl days:
    self.ts_submit = datetime.utcnow() + timedelta(seconds=(10 * 60))
    self.ts_expire = self.ts_submit + \
        timedelta(seconds=(self.ttl * 24 * 60 * 60))
    request_start = self.ts_submit
    while request_start < self.ts_expire:

        molecule_list = []
        for i, exptime in enumerate(self.exposure_times):
            nexp = self.exposure_counts[i]
            defocus = 0.0
            molecule = {
                # Required fields
                'exposure_time': exptime,
                'exposure_count': nexp,
                'filter': self.filter,
                'type': 'EXPOSE',
                'ag_name': '',
                'ag_mode': 'Optional',
                'instrument_name': imager.instrument,
                'bin_x': 1,
                'bin_y': 1,
                'defocus': defocus,
            }
            log_debug(' -> Molecule: ' + str(molecule))
            molecule_list.append(molecule)

        # Window length in seconds.  NOTE(review): the group length uses
        # nexp/exptime leaked from the last loop iteration, i.e. the last
        # molecule only — preserved from the original; confirm intended.
        window = float(config['request_window']) * 60.0 * 60.0
        exposure_group_length = imager.calc_group_length(nexp, exptime)
        request_end = request_start + \
            timedelta(seconds=(exposure_group_length + window))

        req = {
            'observation_note': '',
            'observation_type': 'NORMAL',
            'target': target,
            'windows': [{
                'start': request_start.strftime("%Y-%m-%d %H:%M:%S"),
                'end': request_end.strftime("%Y-%m-%d %H:%M:%S"),
            }],
            'fail_count': 0,
            'location': location,
            'molecules': molecule_list,
            'type': 'request',
            'constraints': constraints,
        }
        reqList.append(req)
        log_debug('Request dictionary: ' + str(req))

        request_start = request_end + \
            timedelta(seconds=(self.cadence * 24.0 * 60.0 * 60.0))

    ur['requests'] = reqList
    ur['type'] = 'compound_request'

    self.json_request = json.dumps(ur)
    log_debug(' -> Completed build of observation request')
# -*- coding: utf-8 -*- """ Created on Tue Apr 12 19:47:05 2016 @author: robouser """ import event_classes import utilities event = event_classes.K2C9Event() event.ogle_name = "OGLE-2016-BLG-0001" event.ogle_alert = "2016-05-02T12:00:51" (ra, dec) = utilities.sex2decdeg("17:23:24.5", "-27:12:23.4") event.ogle_ra = ra event.ogle_dec = dec position = (event.ogle_ra, event.ogle_dec) event.ogle_alert_hjd = utilities.ts_to_hjd(event.ogle_alert, position) event.moa_name = "MOA-2015-BLG-200" event.moa_alert = "2016-05-01T16:50:51" event.moa_ra = ra event.moa_dec = dec position = (event.moa_ra, event.moa_dec) event.moa_alert_hjd = utilities.ts_to_hjd(event.moa_alert, position) event.set_official_name() print event.official_name
def get_moa_parameters(config, log):
    '''Download the parameters of lensing events detected by the MOA
    survey.  MOA make these available via their websites:
    https://it019909.massey.ac.nz/moa/alert<year>/alert.html
    https://it019909.massey.ac.nz/moa/alert<year>/index.dat

    :param dict config: script configuration with the moa_* location and
        filename keys used below.
    :param logging.Logger log: opened log object.
    :return: survey_classes.SurveyData with lenses, last_changed and
        last_updated populated.
    '''

    def get_event_class(line):
        """Extract an event's classification from the HTML table line
        entry (the trailing alphabetic run, after removal of the mark-up
        tags), falling back to 'microlensing' when none can be isolated."""
        iend = len(line)
        istart = None
        i = len(line)
        # Scan backwards over at most the last 20 characters for the
        # first non-alphabetic character, which delimits the start of the
        # classification string:
        while (len(line) - i) < 20 and istart is None:
            if not line[(i - 1):i].isalpha():
                istart = i
            i = i - 1
        # Bug fix: the original tested iend (always set), making the
        # fallback unreachable and crashing on line[None:iend] whenever
        # istart was never assigned.
        if istart is not None:
            classification = line[istart:iend]
        else:
            classification = 'microlensing'
        return classification

    log.info('Syncing data from MOA')
    moa_data = survey_classes.SurveyData()
    years = ['2014', '2015', '2016']

    ts_file_path = path.join(config['moa_data_local_location'],
                             config['moa_time_stamp_file'])

    # Download the website with MOA alerts, which contains the last
    # updated date.  Note that although the URL prefix has to be https,
    # this isn't actually a secure page so no login is required.
    # Note that we draw information for each event from BOTH the HTML
    # alerts table AND the machine-readable index.dat file.  This is to
    # avoid a bug where MOA produce different target coordinates in each
    # place.  Those in the HTML alerts table are correct.
    for year in years:

        # Download the machine-readable index of events:
        url = 'https://it019909.massey.ac.nz/moa/alert' + year + '/index.dat'
        (events_index_data, msg) = utilities.get_http_page(url)
        events_index_data = events_index_data.split('\n')

        # Parse the index of events:
        for entry in events_index_data:
            if len(entry.replace('\n', '').replace(' ', '')) > 0:
                (event_id, field, ra_deg, dec_deg, t0_hjd, tE, u0,
                 I0, tmp1, tmp2) = entry.split()
                if ':' in ra_deg or ':' in dec_deg:
                    (ra_deg, dec_deg) = utilities.sex2decdeg(ra_deg, dec_deg)
                event = event_classes.Lens()
                event_name = 'MOA-' + event_id
                event.set_par('name', event_name)
                event.set_par('survey_id', field)
                event.set_par('ra', ra_deg)
                event.set_par('dec', dec_deg)
                event.set_par('t0', t0_hjd)
                event.set_par('te', tE)
                event.set_par('i0', I0)
                event.set_par('u0', u0)
                event.origin = 'MOA'
                moa_data.lenses[event_name] = event

        # Download the HTML alerts table, which carries the authoritative
        # coordinates and the event classification:
        url = 'https://it019909.massey.ac.nz/moa/alert' + year + '/alert.html'
        (alerts_page_data, msg) = utilities.get_http_page(url)
        alerts_page_data = alerts_page_data.split('\n')
        for entry in alerts_page_data:
            line = entry.lstrip()
            # Event rows start with the year, e.g. '2016-BLG-...':
            if line[0:2] == '20':
                name = 'MOA-' + line[0:12]
                ra = line[12:23]
                dec = line[23:35]
                classification = get_event_class(line)
                if ':' in ra or ':' in dec:
                    (ra_deg, dec_deg) = utilities.sex2decdeg(ra, dec)
                else:
                    # Bug fix: originally decimal-format coordinates left
                    # ra_deg/dec_deg holding stale values from a previous
                    # row; convert them explicitly instead.
                    ra_deg = float(ra)
                    dec_deg = float(dec)
                if name in moa_data.lenses.keys():
                    lens = moa_data.lenses[name]
                    lens.ra = ra_deg
                    lens.dec = dec_deg
                    lens.classification = classification
                    moa_data.lenses[name] = lens

            # The last updated timestamp is one of the last lines in this
            # file.  Store this, if the year queried is the most recent one:
            if year == years[-1] and 'last updated' in line:
                t = line.split(' ')[-2]
                # Parsed only to validate the timestamp format before it
                # is written to disc:
                last_changed = datetime.strptime(t.split('.')[0],
                                                 '%Y-%m-%dT%H:%M:%S')
                with open(ts_file_path, 'w') as fileobj:
                    fileobj.write(t + '\n')
                log.info('--> Last updated at: ' + t)

    moa_data.last_changed = \
        survey_data_utilities.time_stamp_file(ts_file_path,
                                              "%Y-%m-%dT%H:%M:%S")
    log.info('--> Downloaded index of ' + str(len(moa_data.lenses)) +
             ' events')

    # The MOA download is read directly from the website and thus produces
    # no record on disc.  Therefore we output one here in a more readable
    # format than HTML:
    file_path = path.join(config['moa_data_local_location'],
                          config['moa_lenses_file'])
    with open(file_path, 'w') as fileobj:
        fileobj.write('# Name Field RA Dec T0 TE u0 A0 i0 Classification\n')
        for event_id, event in moa_data.lenses.items():
            fileobj.write(event.summary() + '\n')

    update_file_path = path.join(config['moa_data_local_location'],
                                 config['moa_updated_file'])
    moa_data.last_updated = \
        survey_data_utilities.write_update_file(update_file_path)

    log.info('-> Completed sync of MOA data')

    return moa_data