Пример #1
0
def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """Query the CO-OPS SOS service for one variable over a bounding box.

    Configures a ``CoopsSos`` collector for the given time window, region,
    and variable, extracts the station series via ``collector2table``, and
    builds a per-station metadata summary.

    :param start: query start time for the collector.
    :param end: query end time for the collector.
    :param sos_name: SOS variable name to request.
    :param units: units object used to label the data column.
    :param bbox: bounding box passed to ``collector.set_bbox``.
    :param verbose: when True, print the server title and offering count.
    :returns: ``(data, table)`` — the list of station series and a
        DataFrame of station metadata indexed by station name.
    """
    collector = CoopsSos()
    collector.set_bbox(bbox)
    collector.end_time = end
    collector.start_time = start
    collector.variables = [sos_name]

    # Server metadata is fetched up front, before collecting the data.
    offerings = collector.server.offerings
    server_title = collector.server.identification.title

    data = collector2table(
        collector=collector,
        config={"units": units, "sos_name": sos_name},
        col="{} ({})".format(sos_name, units.format(cf_units.UT_ISO_8859_1)),
    )

    # One metadata value per station series for each summary column.
    columns = ("station_name", "station_code", "sensor", "lon", "lat")
    summary = {
        key: [series._metadata.get(key) for series in data]
        for key in columns
    }
    summary["depth"] = [series._metadata.get("depth", "NA") for series in data]

    table = pd.DataFrame(summary).set_index("station_name")
    if verbose:
        print("Collector offerings")
        print("{}: {} offerings".format(server_title, len(offerings)))
    return data, table
Пример #2
0
def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """Read CO-OPS observations for one variable over a bounding box.

    We need to retry in case of failure b/c the server cannot handle
    the high traffic during hurricane season.

    :param start: query start time for the collector.
    :param end: query end time for the collector.
    :param sos_name: SOS variable name to request.
    :param units: units object used to label the data column.
    :param bbox: bounding box passed to ``collector.set_bbox``.
    :param verbose: when True, print the server title and offering count.
    :returns: ``(data, table)`` — the list of station series and a
        DataFrame of station metadata indexed by station name.
    """
    print('     >> Get CO-OPS information')

    collector = CoopsSos()
    collector.set_bbox(bbox)
    collector.end_time = end
    collector.start_time = start
    collector.variables = [sos_name]

    # Server metadata is fetched up front, before collecting the data.
    offerings = collector.server.offerings
    server_title = collector.server.identification.title

    # datum='MSL' added per Saeed — values are relative to mean sea level.
    config = {
        'units': units,
        'sos_name': sos_name,
        'datum': 'MSL',
    }

    data = collector2table(
        collector=collector,
        config=config,
        col='{} ({})'.format(sos_name, units.format(cf_units.UT_ISO_8859_1))
    )

    # One metadata value per station series for each summary column.
    columns = ('station_name', 'station_code', 'sensor', 'lon', 'lat')
    summary = {
        key: [series._metadata.get(key) for series in data]
        for key in columns
    }
    summary['depth'] = [series._metadata.get('depth', 'NA') for series in data]

    table = pd.DataFrame(summary).set_index('station_name')
    if verbose:
        print('Collector offerings')
        print('{}: {} offerings'.format(server_title, len(offerings)))
    return data, table
Пример #3
0
            st_list[station_name]["lat"] = lat_data[i]
            st_list[station_name]["source"] = source
            st_list[station_name]["lon"] = lon_data[i]
            print station_name

    print "number of stations in bbox", len(st_list.keys())
    return st_list


# <markdowncell>

# #COOPS Station Locations

# <codecell>

# Configure a CO-OPS SOS collector for the currents variable over the
# notebook's time window.  (Python 2 print-statement syntax throughout.)
coops_collector = CoopsSos()
coops_collector.start_time = start_time
coops_collector.end_time = end_time
coops_collector.variables = data_dict["currents"]["sos_name"]
# NOTE(review): the result of this attribute access is discarded; it may be
# here only to trigger a GetCapabilities request — confirm.
coops_collector.server.identification.title
print coops_collector.start_time, ":", coops_collector.end_time
# Count the offerings the SOS server advertises.
ofrs = coops_collector.server.offerings
print(len(ofrs))

# <codecell>

# Echo the query window and the bounding box as a comma-separated string.
print "Date: ", iso_start, " to ", iso_end
box_str = ','.join(str(e) for e in bounding_box)
print "Lat/Lon Box: ", box_str

url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
Пример #4
0
 def setUp(self):
     """Create a fresh CoopsSos collector for each test case."""
     self.c = CoopsSos()
Пример #5
0
    def get_nos_data(self, site, observations, begin_date, units_coverter,
                     db_obj):
        """Download the 24 hours of CO-OPS SOS data ending at *begin_date*
        for *site* and store each record in the xenia database.

        For every observation configured for the site, the SOS service is
        queried for CSV output; the header row is parsed to locate the
        date/value/lat/lon/depth columns, and each data row is written via
        ``db_obj.addRec``.  ``units_coverter`` (name misspelled in the
        original signature) appears unused in this method; it is kept for
        call-signature compatibility.  Returns None — all results go to
        the database.
        """
        start_time = time.time()
        logger = logging.getLogger(self.__class__.__name__)
        logger.debug("Starting get_nos_data")

        row_entry_date = datetime.now()
        utc_tz = timezone('UTC')
        # NOTE(review): eastern_tz is never used in this method.
        eastern_tz = timezone('US/Eastern')

        platform_handle = 'nos.%s.met' % (site)
        # Create the platform with a minimal obs list if it does not exist yet.
        if db_obj.platformExists(platform_handle) is None:
            obs_list = []
            for obs_setup in observations:
                if site in obs_setup['sites']:
                    for xenia_obs in obs_setup['xenia_obs']:
                        obs_list.append({
                            'obs_name': xenia_obs['xenia_name'],
                            'uom_name': xenia_obs['xenia_units'],
                            's_order': 1
                        })
            db_obj.buildMinimalPlatform(platform_handle, obs_list)
        #Build sensor_id and m_type_id list.
        for obs_setup in observations:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    m_type_id = db_obj.mTypeExists(xenia_obs['xenia_name'],
                                                   xenia_obs['xenia_units'])
                    sensor_id = db_obj.sensorExists(xenia_obs['xenia_name'],
                                                    xenia_obs['xenia_units'],
                                                    platform_handle, 1)
                    # Cache the database ids on the setup dict; they are used
                    # below when building each multi_obs record.
                    xenia_obs['m_type_id'] = m_type_id
                    xenia_obs['sensor_id'] = sensor_id

        sos_query = CoopsSos()
        #dates.sort(reverse=True)

        logger.debug("Query site: %s for date: %s" % (site, begin_date))
        sos_query.clear()
        #utc_end_date = begin_date.astimezone(utc_tz) + timedelta(hours=24)
        # Query window: the 24 hours ending at begin_date, both in UTC.
        utc_end_date = begin_date.astimezone(utc_tz)
        start_date = begin_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in observations:
            if site in obs_setup['sites']:
                # Column indices are discovered from the CSV header row below.
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    # results = nos_query.collect()
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    logger.exception(e)
                else:
                    # raw() returns bytes here; decode before CSV parsing.
                    csv_reader = csv.reader(
                        response.decode('utf-8').split('\n'), delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            # Data rows: everything after the header, skipping
                            # blank lines.
                            if line_cnt > 0 and len(row):
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    obs_val = 0.0
                                try:
                                    if depth_ndx is not None:
                                        depth = float(row[depth_ndx])
                                    else:
                                        depth = 0
                                except ValueError as e:
                                    logger.exception(e)
                                    depth = 0
                                try:
                                    # NOTE(review): if lat_ndx/lon_ndx are still
                                    # None (columns absent from the header),
                                    # latitude/longitude may be unbound when the
                                    # record is built below — confirm headers
                                    # always include them.
                                    if lat_ndx is not None:
                                        latitude = float(row[lat_ndx])
                                    if lon_ndx is not None:
                                        longitude = float(row[lon_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    latitude = 0.0
                                    longitude = 0.0

                                obs_rec = multi_obs(
                                    row_entry_date=row_entry_date,
                                    platform_handle=platform_handle,
                                    sensor_id=xenia_obs_setup['sensor_id'],
                                    m_type_id=xenia_obs_setup['m_type_id'],
                                    m_date=obs_date.strftime(
                                        '%Y-%m-%dT%H:%M:%S'),
                                    m_lon=longitude,
                                    m_lat=latitude,
                                    m_z=depth,
                                    m_value=obs_val,
                                )

                                rec_id = db_obj.addRec(obs_rec, True)
                                if rec_id is not None:
                                    logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                                else:
                                    logger.error("Failed adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                            else:
                                # Header row: locate the columns of interest by
                                # substring match on the column names.
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1
        """
    for single_obs in nos_obs:
      obs_type = nos_obs[single_obs]['xenia_name']
      uom_type = nos_obs[single_obs]['xenia_units']
      s_order = 1

      nos_query.filter(features=[site], start=start_date, end=utc_end_date, variables=[single_obs])
      try:
        #results = nos_query.collect()
        response = nos_query.raw(responseFormat="text/csv")
      except Exception as e:
        logger.exception(e)
      else:
        csv_reader = csv.reader(response.split('\n'), delimiter=',')
        line_cnt = 0
        for row in csv_reader:
          if line_cnt > 0 and len(row):
            obs_date = datetime.strptime(row[4], '%Y-%m-%dT%H:%M:%SZ')
            obs_val = float(row[5])
            logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                         (obs_type, uom_type, obs_date, obs_val, s_order))



          line_cnt += 1
    """
        logger.debug("Finished get_nos_data in %f seconds" %
                     (time.time() - start_time))

        return
def get_nos_data(site, dates, units_coverter, db_obj):
    """Download CO-OPS SOS observations for *site* over each date in *dates*
    and store them via ``db_obj.addMeasurement``.

    For every date, a 48-hour UTC window (date-24h .. date+24h) is queried
    for each observation in the module-level ``nos_obs`` mapping.
    ``units_coverter`` (name misspelled in the original signature) appears
    unused in this function; it is kept for call-signature compatibility.
    Returns None — all results go to the database.
    """
    start_time = time.time()
    logger = logging.getLogger(__name__)
    logger.debug("Starting get_nos_data")

    row_entry_date = datetime.now()
    utc_tz = timezone('UTC')
    # NOTE(review): eastern_tz is never used in this function.
    eastern_tz = timezone('US/Eastern')

    platform_handle = 'nos.%s.met' % (site)
    # Create the platform with a minimal obs list if it does not exist yet.
    # NOTE(review): the sibling method version tests `is None` here; confirm
    # which sentinel platformExists actually returns.
    if db_obj.platformExists(platform_handle) == -1:
        obs_list = []
        for single_obs in nos_obs:
            obs_list.append({
                'obs_name': nos_obs[single_obs]['xenia_name'],
                'uom_name': nos_obs[single_obs]['xenia_units'],
                's_order': 1
            })
        db_obj.buildMinimalPlatform(platform_handle, obs_list)

    nos_query = CoopsSos()
    #dates.sort(reverse=True)
    for rec_date in dates:
        logger.debug("Query site: %s for date: %s" % (site, rec_date))
        nos_query.clear()
        utc_end_date = rec_date.astimezone(utc_tz) + timedelta(hours=24)
        start_date = rec_date.astimezone(utc_tz) - timedelta(hours=24)

        for single_obs in nos_obs:
            obs_type = nos_obs[single_obs]['xenia_name']
            uom_type = nos_obs[single_obs]['xenia_units']
            s_order = 1

            nos_query.filter(features=[site],
                             start=start_date,
                             end=utc_end_date,
                             variables=[single_obs])
            try:
                #results = nos_query.collect()
                response = nos_query.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
            else:
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                for row in csv_reader:
                    # Skip the CSV header (line 0) and blank rows.
                    if line_cnt > 0 and len(row):
                        # Hard-coded columns: row[4]=timestamp, row[5]=value,
                        # row[2]/row[3] passed as position — assumed CSV
                        # layout; TODO confirm against the SOS response.
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (obs_type, uom_type, obs_date, obs_val, s_order))

                        if not db_obj.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(db_obj.lastErrorMsg)

                    line_cnt += 1

    logger.debug("Finished get_nos_data in %f seconds" %
                 (time.time() - start_time))

    return
    except (IOError, OSError, RuntimeError, TimeoutError) as e:
        print("Could not access URL {}.html\n{!r}".format(url, e))

# Presumably the non-station (model) endpoints filtered earlier — confirm
# against the cell that builds `non_stations`.
dap_urls = non_stations

# List the filtered OPeNDAP endpoints (as browsable .html URLs).
print(fmt(" Filtered DAP "))
for url in dap_urls:
    print("{}.html".format(url))

Now we have a nice list of all the models available in the catalog for the domain we specified.
We still need to find the observations for the same domain.
To accomplish that, we will use the `pyoos` library to search the [SOS CO-OPS](https://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/) services using virtually the same configuration options as the catalog search.

from pyoos.collectors.coops.coops_sos import CoopsSos

# Configure the CO-OPS SOS collector with the same region, time window, and
# variable options used for the catalog search.
collector_coops = CoopsSos()

collector_coops.set_bbox(config["region"]["bbox"])
collector_coops.end_time = config["date"]["stop"]
collector_coops.start_time = config["date"]["start"]
collector_coops.variables = [config["sos_name"]]

# Summarize what the SOS server offers for this query.
ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title
print(fmt(" Collector offerings "))
print("{}: {} offerings".format(title, len(ofrs)))

To make it easier to work with the data we extract the time-series as pandas tables and interpolate them to a common 1-hour interval index.

import pandas as pd
from ioos_tools.ioos import collector2table
def process_nos8661070_data(platform_handle, units_converter, xenia_db,
                            unique_dates):
    """Fetch CO-OPS SOS data for NOS station 8661070 and store each record
    in the xenia database.

    For every date string in *unique_dates* (format '%Y-%m-%d', interpreted
    as US/Eastern), the preceding 24 hours are queried for each observation
    in ``nos_obs`` and the rows are written via ``xenia_db.addMeasurement``.
    *units_converter* appears unused in this function; it is kept for
    call-signature compatibility.  Returns None.
    """

    logger = logging.getLogger(__name__)
    utc_tz = timezone('UTC')
    eastern_tz = timezone('US/Eastern')
    row_entry_date = datetime.now()

    # NOTE(review): platform_name_parts is never used below.
    platform_name_parts = platform_handle.split('.')
    """
  Create a data collection object.
  Contructor parameters are:
    url - THe SWE endpoint we're interested in
    version - Optional default is '1.0.0' The SWE version the endpoint.
    xml - Optional default is None - The XML response from a GetCapabilities query of the server.
  """
    dataCollector = CoopsSos()
    """
  obs_list = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum',
            'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
            'http://mmisw.org/ont/cf/parameter/wind_speed',
            'http://mmisw.org/ont/cf/parameter/wind_from_direction']
  obs_list = [('water_surface_height_above_reference_datum', 'm'),
             ('sea_water_temperature', 'celsius'),
             ('wind_speed', 'm_s-1'),
              ('wind_from_direction', 'degrees_true')]
  """
    # Map SOS observation names to their xenia obs names and units.
    nos_to_xenia = {
        "water_surface_height_above_reference_datum": {
            "units": "m",
            "xenia_name": "water_level",
            "xenia_units": "m"
        },
        "sea_water_temperature": {
            "units": "celsius",
            "xenia_name": "water_temperature",
            "xenia_units": "celsius"
        },
        "wind_speed": {
            "units": "m_s-1",
            "xenia_name": "wind_speed",
            "xenia_units": "m_s-1"
        },
        "wind_from_direction": {
            "units": "degrees_true",
            "xenia_name": "wind_from_direction",
            "xenia_units": "degrees_true"
        }
    }
    #nos_obs = nos_to_xenia.keys()
    # Only water temperature is currently fetched.
    nos_obs = ['sea_water_temperature']
    # Create the platform with a minimal obs list if it does not exist yet.
    if xenia_db.platformExists(platform_handle) == -1:
        s_order = 1
        obs_list = []
        for obs_key in nos_to_xenia:
            obs_info = nos_to_xenia[obs_key]
            obs_list.append({
                'obs_name': obs_info['xenia_name'],
                'uom_name': obs_info['xenia_units'],
                's_order': s_order
            })
        xenia_db.buildMinimalPlatform(platform_handle, obs_list)
    for start_date in unique_dates:
        # Query window: the 24 hours preceding local midnight of start_date,
        # converted to UTC.
        utc_start_date = (eastern_tz.localize(
            datetime.strptime(start_date, '%Y-%m-%d'))).astimezone(utc_tz)
        start_date = utc_start_date - timedelta(hours=24)
        logger.debug("Platform: %s Begin Date: %s End Date: %s" %
                     (platform_handle, start_date, utc_start_date))
        for single_obs in nos_obs:
            obs_type = nos_to_xenia[single_obs]['xenia_name']
            uom_type = nos_to_xenia[single_obs]['xenia_units']
            s_order = 1
            dataCollector.filter(features=['8661070'],
                                 variables=[single_obs],
                                 start=start_date,
                                 end=utc_start_date)
            try:
                response = dataCollector.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
            else:
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                for row in csv_reader:
                    # Skip the CSV header (line 0) and blank rows.
                    if line_cnt > 0 and len(row):
                        # Hard-coded columns: row[4]=timestamp, row[5]=value,
                        # row[2]/row[3] passed as position — assumed CSV
                        # layout; TODO confirm against the SOS response.
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (single_obs, uom_type, obs_date, obs_val, s_order))
                        if not xenia_db.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(xenia_db.lastErrorMsg)

                    line_cnt += 1