Code example #1
def find_and_list_stations(args):
    """
    Find stations that satisfy search criteria and return a list of
    dicts with their information.
    """
    # dictionary to convert station names to MesoWest network ids
    network_to_mesowest_id = { 'RAWS' : 2, 'NWS' : 1 }
    net_id = network_to_mesowest_id[args.network]

    # retrieve the network page
    content = retrieve_web_page(mesowest_net_url % net_id)
    soup = BeautifulSoup(content)

    # find the second-to-last table, which contains the station list
    # & extract rows into a list of lists
    stations = extract_stations(soup.find_all('table')[-2])

    print('# total stations: %d' % len(stations))

    # if requested, filter stations by state
    if len(args.state) > 0:
        stations = [s for s in stations if s['state'] == args.state]
        print('# stations in %s : %d' % (args.state, len(stations)))

    # only report active stations
    stations = [s for s in stations if s['status'] == 'ACTIVE']
    print('# ACTIVE stations : %d' % len(stations))

    # if requested, filter by variables
    if args.vlist is not None:
        stations = [s for s in stations if observes_variables(s, args.vlist)]
        print('# stations yielding %s: %d' % (str(args.vlist), len(stations)))

    return stations
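
A minimal, hypothetical driver for this function is sketched below. The command-line flag names and defaults are assumptions made for the example; only the attributes network, state and vlist are taken from the code above.

# hypothetical command-line driver for find_and_list_stations; the flag names
# are illustrative, only args.network, args.state and args.vlist are required
import argparse

parser = argparse.ArgumentParser(description='List MesoWest stations.')
parser.add_argument('--network', choices=['RAWS', 'NWS'], default='RAWS')
parser.add_argument('--state', default='')            # e.g. CO; empty string disables the filter
parser.add_argument('--vars', dest='vlist', nargs='+', default=None)
args = parser.parse_args()

for s in find_and_list_stations(args):
    print(s)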
Code example #2
def download_station_data(station_info, out_fmt, tstmp, length_hrs, vlist = None):
    """
    Downloads station observations for the given station and parameters.
    """
    output_map = { 'xls' : 'Excel', 'csv' : 'csv', 'xml' : 'xml' }
    
    params = [ ['product', ''],                 # empty in the form on the website
               ['stn', station_info['code']],   # the station code
               ['unit', '1'],                   # metric units
               ['time', 'GMT'],                 # tstamp will be in GMT
               ['day1', tstmp.day],             # the end timestamp of the measurements
               ['month1', '%02d' % tstmp.month],
               ['year1', tstmp.year],
               ['hour1', tstmp.hour],
               ['hours', length_hrs],
               ['daycalendar', 1],
               ['output', output_map[out_fmt]], # output format (excel/csv/xml)
               ['order', '0']                   # order is always ascending
               ]

    # determine which variables to request; bail out if the station does not observe them
    var_list = vlist if vlist is not None else station_info['vlist']
    if not observes_variables(station_info, var_list):
        return None

    # append the requested variables to the GET parameters
    for v in var_list:
        params.append([v, v])

    # join all parameters into the query string of the GET request
    get_rq = mesowest_dl_url + '?' + '&'.join(k + '=' + str(v) for k, v in params)

    # download the observed variables
    content = retrieve_web_page(get_rq)
    return content 
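
To illustrate the call, a hypothetical invocation is sketched below; the station code, variable list and output file name are made up for the example.

# hypothetical invocation of download_station_data; the station record would
# normally come from find_and_list_stations / get_station_info
from datetime import datetime

station = { 'code' : 'ESPC2', 'vlist' : ['TMPF', 'RELH', 'SKNT'] }

# request the last 24 hours of observations in CSV form, ending now (GMT)
csv_data = download_station_data(station, 'csv', datetime.utcnow(), 24)
if csv_data is not None:
    with open('%s_obs.csv' % station['code'], 'w') as f:
        f.write(csv_data)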
Code example #3
def get_station_info(station_info):
    """
    Return a list all the variables that are measured by a station with the station
    code given, store it in the station_info dictionary and also return it.
    """
    # retrieve the station's variable listing page and parse it
    p = retrieve_web_page(mesowest_station_url % station_info['code'])
    soup = BeautifulSoup(p)
    # the second-to-last table lists the observed variables; their codes are the
    # 'value' attributes of the checkbox inputs in its third cell
    table = soup.find_all('table')[-2]
    varc = table.find_all('td')[2]
    vlist = [ inp.get('value') for inp in varc.find_all('input') ]
    station_info['vlist'] = vlist

    # retrieve web page with position info for station
    p = retrieve_web_page(mesowest_station_pos % station_info['code'])
    soup = BeautifulSoup(p)
    # keep only the 'KEY: value' lines and build a lookup dictionary from them
    lines = [ s.strip() for s in soup.div.getText().split('\n') ]
    d = dict([ s.split(':', 1) for s in lines if s.find(':') > 0 ])
    # elevation is listed in feet; strip the trailing ' ft' and convert to meters
    station_info['elevation'] = int(d['ELEVATION'][:-3]) * 0.3048
    station_info['lat'] = float(d['LATITUDE'])
    station_info['lon'] = float(d['LONGITUDE'])
    station_info['wims'] = d['WIMS ID']
    station_info['mnet'] = d['MNET']
    station_info['name'] = d['NAME']

    return vlist
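
A short, hypothetical example of how get_station_info can be combined with the previous function is sketched below; the station code is made up for the example.

# hypothetical example: augment a bare station record with its variable list
# and position, then print a one-line summary
info = { 'code' : 'ESPC2' }
get_station_info(info)
print('%s at %.4f, %.4f (%.0f m): %s'
      % (info['name'], info['lat'], info['lon'], info['elevation'],
         ', '.join(info['vlist'])))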