def get_events_list( config, log ):
    """Function to scrape the list of events which have been modeled by
    the RTModel system."""
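    # Assumed config keys (inferred from the usage below): 'root_url' and
    # 'year'; the index page is then <root_url>/<year>/RTModel.htm, and each
    # line naming an event is expected to begin with its short identifier
    # (containing 'OB' or 'KB').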
    
    url = path.join( str(config['root_url']), str(config['year']), 'RTModel.htm' )
    events_list = []
    
    (events_index_data,msg) = utilities.get_http_page(url)
    log.info('Loaded RTmodel URL with status: ' + msg)
    
    for line in events_index_data.split('\n'):
        if 'OB' in line or 'KB' in line:
            entry = line.replace('\n','').split()[0]
            events_list.append( entry ) 
    log.info('Models available for ' + str(len(events_list)) + ' events')
    
    return events_list


def get_event_params( config, event_id, log ):
    """Function to retrieve the parameters of RTModel's current best-fitting
    model for a given event.
    The summary page for each event may list one or more models; we assume
    the top model has the lowest chisq, i.e. is the current best fit, and
    scrape the relevant parameters, which may include only a subset of the
    full parameter set.
    """
    
    url = path.join( str(config['root_url']), str(config['year']), \
                        event_id + '.htm' )
    (page_data,msg) = utilities.get_http_page(url,parse=False)
    
    model = event_classes.RTModel()
    model.event_name = utilities.short_to_long_name(event_id)
    model.url = url
    
    # Identify the top model, and extract the first line which contains the
    # parameters.  Separate them from the HTML and set the model parameters.
    page_lines = page_data.split('\n')
    idx = None
    for i, page_line in enumerate(page_lines):
        if '>Model ' in page_line and '&chi;<sup>2</sup>' in page_line:
            idx = i
            break
    
    # Guard against pages with no recognizable model entry, which would
    # otherwise cause an exception below:
    if idx is None:
        log.info('No model entry found for ' + event_id)
        return model
    
    line = page_lines[idx]
    entry = line.split('&chi;<sup>2</sup>')[1].replace('=','')
    entry = entry.split('</b>')[0]
    
    model.chisq = float(entry)
    
    params = {'s':'<br>s=', \
                'q':'q', \
                'u0': 'u<sub>0</sub>', \
                'theta': '&theta;', \
                'rho': '&rho;<sub>*</sub>', \
                'tE': 't<sub>E</sub>', \
                't0': 't<sub>0</sub>',\
                'pi_perp': '&pi;<sub>&perp;</sub>',\
                'pi_para': '&pi;<sub>||</sub>'
                }
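    # Each parameter is assumed to appear in the model line in the form
    #   <search_term>=<value>&plusmn;<sigma>&nbsp;
    # e.g. (illustrative only) 's=1.234&plusmn;0.005&nbsp;'; the loop below
    # locates the search term, reads the value between '=' and '&plusmn;',
    # and the uncertainty between '&plusmn;' and '&nbsp;'.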
    
    for key, search_term in params.items():
        try:
            i = line.index(search_term)
            istart = i + line[i:].index('=') + 1
            iend = i + line[i:].index('&plusmn;')
            value = float(line[istart:iend])
            istart = iend + len('&plusmn;')
            iend = istart + line[istart:].index('&nbsp;')
            sigma = float(line[istart:iend])
            
            # The t0 value on the page appears to be quoted relative to
            # HJD 2450000.0; convert to a full HJD:
            if key == 't0':
                value = 2450000.0 + value
            
            setattr(model,key, value)
            setattr(model,'sig_'+key, sigma)
            
        except ValueError:
            pass
      
    return model


def get_moa_parameters(config, log):
    '''Function to download the parameters of lensing events detected by the
        MOA survey.  MOA makes these available via its website:
        https://it019909.massey.ac.nz/moa/alert<year>/alert.html
        https://it019909.massey.ac.nz/moa/alert<year>/index.dat
        '''
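    # Usage sketch (config is assumed to supply 'moa_data_local_location',
    # 'moa_lenses_file', 'moa_time_stamp_file' and 'moa_updated_file', as
    # used further down):
    #   moa_data = get_moa_parameters(config, log)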
    
    def get_event_class(line):
        """Function to extract an event's classification from the HTML
        table line entry, after removal of the mark-up tags."""
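        # The classification is assumed to be the trailing run of alphabetic
        # characters in the table row, e.g. (illustrative)
        # '...&nbsp;microlensing' would yield 'microlensing'.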
        
        iend = len(line)
        istart = None
        i = len(line)
        # Scan backwards over at most the last 20 characters for the first
        # non-alphabetic character, which marks the start of the trailing
        # classification word:
        while (len(line)-i) < 20 and istart == None:
            if line[(i-1):i].isalpha() == False:
                istart = i
            i = i - 1
        
        if istart != None:
            classification = line[istart:iend]
        else:
            classification = 'microlensing'
        return classification
        
    log.info('Syncing data from MOA')
    moa_data = survey_classes.SurveyData()
    years = [ '2014', '2015', '2016' ]
    
    # Download the website with the MOA alerts, which also contains the
    # last-updated date.  Note that although the URL prefix has to be https,
    # this isn't actually a secure page, so no login is required.
    # Note that we draw information for each event from BOTH the HTML alerts
    # table AND the machine-readable index.dat file.  This works around a bug
    # where MOA produce different target coordinates in each place; those in
    # the HTML alerts table are the correct ones.
    ts = Time.now()
    for year in years: 
        
        # Download the index of events:
        url = 'https://it019909.massey.ac.nz/moa/alert' + year + '/index.dat'
        (events_index_data,msg) = utilities.get_http_page(url)
        events_index_data = events_index_data.split('\n')
    
        # Parse the index of events
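        # Each non-blank row of index.dat is assumed to contain ten
        # whitespace-separated columns: event ID, field, RA, Dec, t0 (HJD),
        # tE, u0, I0, plus two further columns that are not used here.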
        for entry in events_index_data:
            if len(entry.replace('\n','').replace(' ','')) > 0:
                (event_id, field, ra_deg, dec_deg, t0_hjd, tE, u0, \
                        I0, tmp1, tmp2) = entry.split()
                if ':' in ra_deg or ':' in dec_deg: 
                    (ra_deg, dec_deg) = utilities.sex2decdeg(ra_deg,dec_deg)
                event = event_classes.Lens()
                event_name = 'MOA-' + event_id
                event.set_par('name',event_name)
                event.set_par('survey_id',field)
                event.set_par('ra',ra_deg)
                event.set_par('dec',dec_deg)
                event.set_par('t0',t0_hjd)
                event.set_par('te',tE)
                event.set_par('i0',I0)
                event.set_par('u0',u0)
                event.origin = 'MOA'
                moa_data.lenses[event_name] = event
    
        # Download the HTML alerts table for this year to obtain the correct
        # coordinates and each event's classification; for the most recent
        # year, the last-updated timestamp is also read from this page:
        url = 'https://it019909.massey.ac.nz/moa/alert' + year + '/alert.html'
        (alerts_page_data,msg) = utilities.get_http_page(url)
        alerts_page_data = alerts_page_data.split('\n')
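        # Rows of the HTML table are assumed to be fixed-width: characters
        # 0-11 hold the event name (beginning with the year), 12-22 the RA
        # and 23-34 the Dec, with the classification as the trailing word.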
        
        for entry in alerts_page_data:
            line = entry.lstrip()
            if line[0:2] == '20':
                name = 'MOA-' + line[0:12]
                ra = line[12:23]
                dec = line[23:35]
                classification = get_event_class(line)

                if ':' in ra or ':' in dec: 
                    (ra_deg, dec_deg) = utilities.sex2decdeg(ra,dec)
                else:
                    # Coordinates already in decimal degrees; without this
                    # branch, values from a previous row would carry over:
                    (ra_deg, dec_deg) = (ra, dec)
                    
                if name in moa_data.lenses.keys():
                    lens = moa_data.lenses[name]
                    lens.ra = ra_deg
                    lens.dec = dec_deg
                    lens.classification = classification
                    moa_data.lenses[name] = lens
                    
            # The last updated timestamp is one of the last lines in this file.
            # Store this, if the year queried is the most recent one:
            if year == years[-1]:
                if 'last updated' in line:
                    t = line.split(' ')[-2]
                    last_changed = datetime.strptime(t.split('.')[0],'%Y-%m-%dT%H:%M:%S')
                    ts_file_path = path.join( config['moa_data_local_location'], \
                                config['moa_time_stamp_file'] )
                    fileobj = open( ts_file_path, 'w' )
                    fileobj.write( t + '\n' )
                    fileobj.close()
                    log.info('--> Last updated at: '+t)
                    moa_data.last_changed = \
                        survey_data_utilities.time_stamp_file( ts_file_path, \
                                                "%Y-%m-%dT%H:%M:%S" )

    log.info('--> Downloaded index of ' + str(len(moa_data.lenses)) + \
                        ' events')
                        
    # The MOA download is read directly from the website and thus produces
    # no record on disc.  Therefore we output one here in a more readable 
    # format than HTML:
    file_path = path.join( config['moa_data_local_location'], \
                          config['moa_lenses_file'] )
    fileobj = open(file_path,'w')
    fileobj.write('# Name       Field     RA      Dec     T0       TE      u0      A0    i0    Classification\n')
    for event_id, event in moa_data.lenses.items():
        fileobj.write( event.summary() + '\n' )
    fileobj.close()

    update_file_path = path.join( config['moa_data_local_location'], \
                                        config['moa_updated_file']  )
    moa_data.last_updated = \
        survey_data_utilities.write_update_file( update_file_path )

    log.info('-> Completed sync of MOA data')    
    
    return moa_data