示例#1
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the CoopsSos collector against the live
    NOAA CO-OPS SOS server (network access required)."""

    def setUp(self):
        self.c = CoopsSos()

    def test_coops_server_id(self):
        """GetCapabilities service identification matches the server."""
        ident = self.c.server.identification
        assert ident.title == "NOAA.NOS.CO-OPS SOS"
        assert ident.service == 'OGC:SOS'
        assert ident.version == '1.0.0'
        assert ident.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        assert ident.keywords == [
            'Air Temperature', 'Barometric Pressure', 'Conductivity',
            'Currents', 'Datums', 'Rain Fall', 'Relative Humidity',
            'Harmonic Constituents', 'Salinity', 'Water Level',
            'Water Level Predictions', 'Water Temperature', 'Winds'
        ]
        assert ident.fees == 'NONE'
        assert ident.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        """DescribeSensor for the first procedure of the second offering."""
        procedure = self.c.server.offerings[1].procedures[0]
        outputFormat = self.c.server.get_operation_by_name(
            'DescribeSensor').parameters['outputFormat']['values'][0]
        response = self.c.get_metadata(procedure=procedure,
                                       outputFormat=outputFormat)
        assert isinstance(response.systems[0].id, str)

    def test_coops_get_observation(self):
        """Raw IOOS-XML GetObservation for one water-level station."""
        # COOPS does not support empty eventtime parameters
        eventTime = "2012-10-01T00:00:00Z/2012-10-01T23:59:00Z"
        # A set literal replaces the original no-op list(set([...])).
        station_names = {'station-9052000'}
        # First offering whose id matches a requested station (None if absent,
        # which fails below exactly as the original loop-based lookup did).
        station = next(
            (o for o in self.c.server.offerings if o.id in station_names),
            None)

        # Fix: drop the redundant ``res`` alias from the original
        # ``response = res = ...`` double assignment; ``res`` was never used.
        response = self.c.get_raw_data(
            offerings=[station.name],
            responseFormat='text/xml;schema="ioos/0.6.1"',
            observedProperties=[
                'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
            ],
            eventTime=eventTime,
            dataType='VerifiedSixMinute')

        assert isinstance(response, str)
示例#2
0
def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """Fetch CO-OPS SOS observations for *sos_name* inside *bbox*.

    Returns a ``(data, table)`` pair: the raw ``collector2table`` result
    and a pandas DataFrame of station metadata indexed by station name.
    """
    collector = CoopsSos()
    collector.set_bbox(bbox)
    collector.end_time = end
    collector.start_time = start
    collector.variables = [sos_name]
    offerings = collector.server.offerings
    server_title = collector.server.identification.title
    config = {"units": units, "sos_name": sos_name}

    column = "{} ({})".format(sos_name, units.format(cf_units.UT_ISO_8859_1))
    data = collector2table(collector=collector, config=config, col=column)

    # Flatten each series' per-station metadata into plain columns.
    keys = ("station_name", "station_code", "sensor", "lon", "lat")
    table = {key: [s._metadata.get(key) for s in data] for key in keys}
    table["depth"] = [s._metadata.get("depth", "NA") for s in data]

    table = pd.DataFrame(table).set_index("station_name")
    if verbose:
        print("Collector offerings")
        print("{}: {} offerings".format(server_title, len(offerings)))
    return data, table
示例#3
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the CoopsSos collector against the live
    NOAA CO-OPS SOS server (network access required)."""

    def setUp(self):
        self.c = CoopsSos()

    def test_coops_server_id(self):
        """GetCapabilities service identification matches the server."""
        ident = self.c.server.identification
        assert ident.title == "NOAA.NOS.CO-OPS SOS"
        assert ident.service == 'OGC:SOS'
        assert ident.version == '1.0.0'
        assert ident.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        # Wrapped for readability; the list contents are unchanged.
        assert ident.keywords == [
            'Air Temperature', 'Barometric Pressure', 'Conductivity',
            'Currents', 'Datums', 'Rain Fall', 'Relative Humidity',
            'Harmonic Constituents', 'Salinity', 'Water Level',
            'Water Level Predictions', 'Water Temperature', 'Winds'
        ]
        assert ident.fees == 'NONE'
        assert ident.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        """DescribeSensor for the first procedure of the second offering."""
        procedure = self.c.server.offerings[1].procedures[0]
        outputFormat = self.c.server.get_operation_by_name(
            'DescribeSensor').parameters['outputFormat']['values'][0]
        response = self.c.get_metadata(procedure=procedure,
                                       outputFormat=outputFormat)
        assert isinstance(response.systems[0].id, str)

    def test_coops_get_observation(self):
        """Raw IOOS-XML GetObservation for one water-level station."""
        # COOPS does not support empty eventtime parameters
        eventTime = "2012-10-01T00:00:00Z/2012-10-01T23:59:00Z"
        # A set literal replaces the original no-op list(set([...])).
        station_names = {'station-9052000'}
        station = None
        for offering in self.c.server.offerings:
            if offering.id in station_names:
                station = offering
                break

        # Fix: drop the redundant ``res`` alias from the original
        # ``response = res = ...`` double assignment; ``res`` was never used.
        response = self.c.get_raw_data(
            offerings=[station.name],
            responseFormat='text/xml;schema="ioos/0.6.1"',
            observedProperties=[
                'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
            ],
            eventTime=eventTime,
            dataType='VerifiedSixMinute')

        assert isinstance(response, str)
        
示例#4
0
def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """
    Read COOPS data for *sos_name* over *bbox* between *start* and *end*.

    We need to retry in case of failure b/c the server cannot handle
    the high traffic during hurricane season.  Returns ``(data, table)``:
    the raw collector2table result and a station-metadata DataFrame.
    """
    print('     >> Get CO-OPS information')

    collector = CoopsSos()
    collector.set_bbox(bbox)
    collector.end_time = end
    collector.start_time = start
    collector.variables = [sos_name]
    offerings = collector.server.offerings
    server_title = collector.server.identification.title
    # Datum fixed to MSL (Saeed's addition in the original).
    config = {'units': units, 'sos_name': sos_name, 'datum': 'MSL'}

    data = collector2table(
        collector=collector,
        config=config,
        col='{} ({})'.format(sos_name, units.format(cf_units.UT_ISO_8859_1))
    )

    # Clean the table: flatten per-station metadata into plain columns.
    keys = ('station_name', 'station_code', 'sensor', 'lon', 'lat')
    table = {key: [s._metadata.get(key) for s in data] for key in keys}
    table['depth'] = [s._metadata.get('depth', 'NA') for s in data]

    table = pd.DataFrame(table).set_index('station_name')
    if verbose:
        print('Collector offerings')
        print('{}: {} offerings'.format(server_title, len(offerings)))
    return data, table
# Print out all the SOS Data URL endpoints

# <codecell>

# service_urls / csw come from the surrounding notebook (CSW catalog search).
sos_urls = service_urls(csw.records,
                        service='sos:url')
print("\n".join(sos_urls))

# <markdowncell>

# ## 1. Get observations from SOS
# Here we are using a custom class from pyoos to read the CO-OPS SOS.  This is definitely unsavory, as the whole point of using a standard is to avoid the need for custom classes for each service.  Need to examine the consequences of removing this and just going with straight SOS service using OWSLib.

# <codecell>

collector = CoopsSos()

collector.set_datum('NAVD')  # MSL
# Bare attribute access: in a notebook this displays the server title
# (it also forces the capabilities document to be fetched).
collector.server.identification.title
collector.start_time = jd_start
collector.end_time = jd_stop
collector.variables = [sos_name]

# <codecell>

ofrs = collector.server.offerings
print(len(ofrs))
# Show a small sample of the (large) offerings list.
for p in ofrs[700:710]:
    print(p)

# <markdowncell>
# <markdowncell>

# ### SOS Requirements
# #### Use Pyoos SOS collector to obtain Observation data from COOPS.

# <codecell>

# start_date / end_date are defined in an earlier notebook cell.
start_time = datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = datetime.strptime(end_date, '%Y-%m-%d %H:%M')

# <codecell>

# ISO-8601 strings for the SOS eventTime parameter.
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

collector = CoopsSos()
collector.set_datum('NAVD')
collector.server.identification.title
collector.start_time = start_time
collector.end_time = end_time
collector.variables = [data_dict["water"]["sos_name"]]

# <codecell>

print("Date: %s to %s" % (iso_start, iso_end))
# box is the bounding box [lon_min, lat_min, lon_max, lat_max] — defined upstream.
box_str = ','.join(str(e) for e in box)
print("Lat/Lon Box: %s" % box_str)

# <codecell>

# Grab the sos url and use it for the service.
示例#7
0
# NOTE: Python 2 snippet — ``from StringIO import StringIO`` does not exist
# on Python 3 (use ``io.StringIO`` there).
from pyoos.collectors.coops.coops_sos import CoopsSos
from pyoos.collectors.ndbc.ndbc_sos import NdbcSos
from owslib.ows import ExceptionReport
from datetime import timedelta
from copy import copy
from StringIO import StringIO

# Collect one DataFrame per variable/server combination.
sos_dfs = []
for sos in sos_servers:
    if "co-ops" in sos.lower() or "ndbc" in sos.lower():

        # CSV Output

        # Pick the collector matching the server URL.
        if "co-ops" in sos.lower():
            # Use the COOPS collector
            collector = CoopsSos()
        elif "ndbc" in sos.lower():
            # Use the NDBC collector
            collector = NdbcSos()
        for v in variables_to_query:
            collector.filter(variables=[v])
            collector.filter(bbox=bounding_box)
            # Walk the requested interval in one-day windows; both bounds
            # start at start_date so the first iteration runs.
            new_start = copy(start_date)
            new_end   = copy(start_date)

            # Hold dataframe for periodic concat
            v_frame = None

            while new_end < end_date:
                new_end = min(end_date, new_start + timedelta(days=1))
                collector.filter(start=new_start)
                # NOTE(review): snippet is truncated here — the rest of the
                # while-loop body (fetch + concat + window advance) is not
                # visible in this excerpt.
示例#8
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the CoopsSos collector (live CO-OPS SOS server,
    network required).  Python 2 era: uses ``basestring`` and the
    ``StringIO`` module."""

    def setUp(self):
        # Fresh collector per test.
        self.c = CoopsSos()

    def test_coops_server_id(self):
        """GetCapabilities service identification matches the server."""
        assert self.c.server.identification.title == "NOAA.NOS.CO-OPS SOS"
        assert self.c.server.identification.service == 'OGC:SOS'
        assert self.c.server.identification.version == '1.0.0'
        assert self.c.server.identification.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        assert self.c.server.identification.keywords == [
            'Air Temperature', 'Barometric Pressure', 'Conductivity',
            'Currents', 'Datums', 'Rain Fall', 'Relative Humidity',
            'Harmonic Constituents', 'Salinity', 'Visibility', 'Water Level',
            'Water Level Predictions', 'Water Temperature', 'Winds'
        ]
        assert self.c.server.identification.fees == 'NONE'
        assert self.c.server.identification.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        """DescribeSensor for station 8454000 parses into SensorML."""
        self.c.features = ['8454000']
        response = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1"')
        assert isinstance(response[0], SensorML)

    def test_raw_coops_get_observation(self):
        """Raw CSV GetObservation for water level at station 8454000."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        # The string below is a sample of the expected CSV payload.
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:A1,41.8071,-71.4012,2012-10-01T00:00:00Z,1.465,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T00:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "1.465"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_dataType(self):
        """Same request constrained to the VerifiedHighLow data type."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]
        self.c.dataType = "VerifiedHighLow"

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        # Sample of the expected CSV payload.
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,1.617,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "1.617"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_datum(self):
        """VerifiedHighLow request re-referenced to the NAVD datum."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        # Sample of the expected CSV payload (note the EPSG datum URN).
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,0.863,urn:ogc:def:datum:epsg::5103,1.818
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        assert len(data) == 4
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ogc:def:datum:epsg::5103"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "0.863"
        assert data[0]['vertical_position (m)'] == "1.818"
        name = s
    names.append(name)

ndbc['name'] = names

ndbc.set_index('name', inplace=True)
to_html(ndbc.head())


# ### CoopsSoS

# In[9]:

from pyoos.collectors.coops.coops_sos import CoopsSos

collector_coops = CoopsSos()

# bbox / stop / start / sos_name come from earlier notebook cells.
collector_coops.set_bbox(bbox)
collector_coops.end_time = stop
collector_coops.start_time = start
collector_coops.variables = [sos_name]

ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title
log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))


# In[10]:

from utilities import get_coops_metadata
# NOTE(review): in this extracted cell order, bbox and sos_name are
# (re)defined *after* the collector above used them — confirm cell order
# against the original notebook.
bbox = [-87.40, 24.25, -74.70, 36.70]

sos_name = "water_surface_height_above_reference_datum"

In this example we will use only the `CoopsSos`,
but it is worth mentioning that `pyoos` has other collectors like IOOS SWE,
NcSOS, 52N, NERRS, NDBC, etc.

Pyoos' usage is quite simple, all we have to do is:

- create an instance of the collector we will use and,
- feed the instance with the data for the collection.

from pyoos.collectors.coops.coops_sos import CoopsSos

collector = CoopsSos()

collector.set_bbox(bbox)
collector.end_time = end_time
collector.start_time = start_time
collector.variables = [sos_name]

Let's check what we got with the search above.

ofrs = collector.server.offerings
title = collector.server.identification.title

print("Collector offerings")
print("{}: {} offerings".format(title, len(ofrs)))

OK... That is quite misleading. We did not find 1113 stations with that search.
示例#11
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the CoopsSos collector (live CO-OPS SOS
    server; network access required)."""

    def setUp(self):
        self.c = CoopsSos()

    def test_coops_server_id(self):
        """GetCapabilities service identification matches the server."""
        assert self.c.server.identification.title == "NOAA.NOS.CO-OPS SOS"
        assert self.c.server.identification.service == 'OGC:SOS'
        assert self.c.server.identification.version == '1.0.0'
        assert self.c.server.identification.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        assert self.c.server.identification.keywords == ['Air Temperature', 'Barometric Pressure', 'Conductivity', 'Currents', 'Datum', 'Harmonic Constituents', 'Rain Fall', 'Relative Humidity', 'Salinity', 'Visibility', 'Water Level', 'Water Level Predictions', 'Water Temperature', 'Winds']
        assert self.c.server.identification.fees == 'NONE'
        assert self.c.server.identification.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        """DescribeSensor for station 8454000 parses into SensorML."""
        self.c.features = ['8454000']
        response = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"')
        assert isinstance(response[0], SensorML)

    def test_raw_coops_get_observation(self):
        """Raw CSV GetObservation for water level at station 8454000."""
        # PEP8 fix: drop the E221 aligned-equals formatting of the original.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']  # noqa

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        # Sample of the expected CSV payload.
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:A1,41.8071,-71.4012,2012-10-01T00:00:00Z,1.465,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert data[0]['station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T00:00:00Z"
        assert data[0]['water_surface_height_above_reference_datum (m)'] == "1.465"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_dataType(self):
        """Same request constrained to the VerifiedHighLow data type."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']
        self.c.dataType = "VerifiedHighLow"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        # Sample of the expected CSV payload.
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,1.617,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert data[0]['station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0]['water_surface_height_above_reference_datum (m)'] == "1.617"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_datum(self):
        """VerifiedHighLow request re-referenced to the NAVD datum."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        # Sample of the expected CSV payload (note the EPSG datum URN).
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,0.863,urn:ogc:def:datum:epsg::5103,1.818
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert len(data) == 4
        assert data[0]['station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ogc:def:datum:epsg::5103"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0]['water_surface_height_above_reference_datum (m)'] == "0.863"
        assert data[0]['vertical_position (m)'] == "1.818"
示例#12
0
    def get_nos_data(self, site, observations, begin_date, units_coverter,
                     db_obj):
        """Pull 24 hours of CO-OPS SOS CSV data ending at *begin_date* for
        *site* and store each observation row in the xenia database.

        site            -- NOS station id used both in the SOS query and in
                           the platform handle 'nos.<site>.met'.
        observations    -- list of obs-setup dicts; each has 'sites',
                           'sos_obs_query' and a 'xenia_obs' list mapping the
                           SOS column name to xenia name/units.
        begin_date      -- tz-aware datetime; the query window is
                           [begin_date-24h, begin_date] in UTC.
        units_coverter  -- unused here (name kept for interface
                           compatibility; note the original typo).
        db_obj          -- database wrapper (platformExists, mTypeExists,
                           sensorExists, addRec, ...).
        """
        start_time = time.time()
        logger = logging.getLogger(self.__class__.__name__)
        logger.debug("Starting get_nos_data")

        row_entry_date = datetime.now()
        utc_tz = timezone('UTC')
        # NOTE(review): eastern_tz is currently unused in this method.
        eastern_tz = timezone('US/Eastern')

        platform_handle = 'nos.%s.met' % (site)
        # Create the platform (with its observation list) on first use.
        if db_obj.platformExists(platform_handle) is None:
            obs_list = []
            for obs_setup in observations:
                if site in obs_setup['sites']:
                    for xenia_obs in obs_setup['xenia_obs']:
                        obs_list.append({
                            'obs_name': xenia_obs['xenia_name'],
                            'uom_name': xenia_obs['xenia_units'],
                            's_order': 1
                        })
            db_obj.buildMinimalPlatform(platform_handle, obs_list)
        #Build sensor_id and m_type_id list.
        for obs_setup in observations:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    m_type_id = db_obj.mTypeExists(xenia_obs['xenia_name'],
                                                   xenia_obs['xenia_units'])
                    sensor_id = db_obj.sensorExists(xenia_obs['xenia_name'],
                                                    xenia_obs['xenia_units'],
                                                    platform_handle, 1)
                    # Cache ids on the setup dict for the insert loop below.
                    xenia_obs['m_type_id'] = m_type_id
                    xenia_obs['sensor_id'] = sensor_id

        sos_query = CoopsSos()
        #dates.sort(reverse=True)

        logger.debug("Query site: %s for date: %s" % (site, begin_date))
        sos_query.clear()
        #utc_end_date = begin_date.astimezone(utc_tz) + timedelta(hours=24)
        utc_end_date = begin_date.astimezone(utc_tz)
        start_date = begin_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in observations:
            if site in obs_setup['sites']:
                # Column indexes are discovered from the CSV header row
                # (first row of the response) before any data is parsed.
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    # results = nos_query.collect()
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    # Best-effort: log and move on to the next obs_setup.
                    logger.exception(e)
                else:
                    csv_reader = csv.reader(
                        response.decode('utf-8').split('\n'), delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            if line_cnt > 0 and len(row):
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    obs_val = 0.0
                                try:
                                    if depth_ndx is not None:
                                        depth = float(row[depth_ndx])
                                    else:
                                        depth = 0
                                except ValueError as e:
                                    logger.exception(e)
                                    depth = 0
                                # NOTE(review): if the header lacked
                                # latitude/longitude columns, latitude and
                                # longitude may be unbound below — confirm
                                # the CO-OPS CSV always includes them.
                                try:
                                    if lat_ndx is not None:
                                        latitude = float(row[lat_ndx])
                                    if lon_ndx is not None:
                                        longitude = float(row[lon_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    latitude = 0.0
                                    longitude = 0.0

                                obs_rec = multi_obs(
                                    row_entry_date=row_entry_date,
                                    platform_handle=platform_handle,
                                    sensor_id=xenia_obs_setup['sensor_id'],
                                    m_type_id=xenia_obs_setup['m_type_id'],
                                    m_date=obs_date.strftime(
                                        '%Y-%m-%dT%H:%M:%S'),
                                    m_lon=longitude,
                                    m_lat=latitude,
                                    m_z=depth,
                                    m_value=obs_val,
                                )

                                rec_id = db_obj.addRec(obs_rec, True)
                                if rec_id is not None:
                                    logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                                else:
                                    logger.error("Failed adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                            else:
                                # Header row: locate each column of interest.
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1
        # Legacy implementation kept for reference (dead string literal).
        """
    for single_obs in nos_obs:
      obs_type = nos_obs[single_obs]['xenia_name']
      uom_type = nos_obs[single_obs]['xenia_units']
      s_order = 1

      nos_query.filter(features=[site], start=start_date, end=utc_end_date, variables=[single_obs])
      try:
        #results = nos_query.collect()
        response = nos_query.raw(responseFormat="text/csv")
      except Exception as e:
        logger.exception(e)
      else:
        csv_reader = csv.reader(response.split('\n'), delimiter=',')
        line_cnt = 0
        for row in csv_reader:
          if line_cnt > 0 and len(row):
            obs_date = datetime.strptime(row[4], '%Y-%m-%dT%H:%M:%SZ')
            obs_val = float(row[5])
            logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                         (obs_type, uom_type, obs_date, obs_val, s_order))



          line_cnt += 1
    """
        logger.debug("Finished get_nos_data in %f seconds" %
                     (time.time() - start_time))

        return
示例#13
0
 def setUp(self):
     """Create a fresh CoopsSos collector before each test."""
     self.c = CoopsSos()
示例#14
0
# coding: utf-8

# Notebook export: `get_ipython()` below only works inside IPython/Jupyter.

# In[1]:

from pyoos.collectors.coops.coops_sos import CoopsSos

collector = CoopsSos()


# In[3]:

# Shell out to show which pyoos version is installed in the environment.
get_ipython().system(u'conda list pyoos')


# In[ ]:


def get_nos_data(site, dates, units_coverter, db_obj):
    """For each date in *dates*, query the CO-OPS SOS for *site* over a
    48-hour window (date-24h .. date+24h, UTC) and store every observation
    in the xenia database via *db_obj*.

    Relies on the module-level ``nos_obs`` mapping (SOS variable ->
    xenia name/units).  ``units_coverter`` is unused here (name kept,
    including the original typo, for interface compatibility).
    Python 2 era: ``response`` is treated as ``str`` and split directly.
    """
    start_time = time.time()
    logger = logging.getLogger(__name__)
    logger.debug("Starting get_nos_data")

    row_entry_date = datetime.now()
    utc_tz = timezone('UTC')
    # NOTE(review): eastern_tz is currently unused in this function.
    eastern_tz = timezone('US/Eastern')

    platform_handle = 'nos.%s.met' % (site)
    # Create the platform (with its observation list) on first use.
    if db_obj.platformExists(platform_handle) == -1:
        obs_list = []
        for single_obs in nos_obs:
            obs_list.append({
                'obs_name': nos_obs[single_obs]['xenia_name'],
                'uom_name': nos_obs[single_obs]['xenia_units'],
                's_order': 1
            })
        db_obj.buildMinimalPlatform(platform_handle, obs_list)

    nos_query = CoopsSos()
    #dates.sort(reverse=True)
    for rec_date in dates:
        logger.debug("Query site: %s for date: %s" % (site, rec_date))
        nos_query.clear()
        utc_end_date = rec_date.astimezone(utc_tz) + timedelta(hours=24)
        start_date = rec_date.astimezone(utc_tz) - timedelta(hours=24)

        for single_obs in nos_obs:
            obs_type = nos_obs[single_obs]['xenia_name']
            uom_type = nos_obs[single_obs]['xenia_units']
            s_order = 1

            nos_query.filter(features=[site],
                             start=start_date,
                             end=utc_end_date,
                             variables=[single_obs])
            try:
                #results = nos_query.collect()
                response = nos_query.raw(responseFormat="text/csv")
            except Exception as e:
                # Best-effort: log and continue with the next variable.
                logger.exception(e)
            else:
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                # Skip the header row (line_cnt == 0); columns are assumed
                # fixed: row[2]=lat, row[3]=lon, row[4]=date, row[5]=value.
                for row in csv_reader:
                    if line_cnt > 0 and len(row):
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (obs_type, uom_type, obs_date, obs_val, s_order))

                        if not db_obj.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(db_obj.lastErrorMsg)

                    line_cnt += 1

    logger.debug("Finished get_nos_data in %f seconds" %
                 (time.time() - start_time))

    return
    except (IOError, OSError, RuntimeError, TimeoutError) as e:
        print("Could not access URL {}.html\n{!r}".format(url, e))

dap_urls = non_stations

print(fmt(" Filtered DAP "))
for url in dap_urls:
    print("{}.html".format(url))

Now we have a nice list of all the models available in the catalog for the domain we specified.
We still need to find the observations for the same domain.
To accomplish that we will use the `pyoos` library and search the [SOS CO-OPS](https://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/) services using virtually the same configuration options from the catalog search.

from pyoos.collectors.coops.coops_sos import CoopsSos

collector_coops = CoopsSos()

# Constrain the collector to the same region, time window and variable
# that were used for the catalog search above.
collector_coops.set_bbox(config["region"]["bbox"])
collector_coops.end_time = config["date"]["stop"]
collector_coops.start_time = config["date"]["start"]
collector_coops.variables = [config["sos_name"]]

# Report how many offerings the server advertises.
ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title
print(fmt(" Collector offerings "))
print("{}: {} offerings".format(title, len(ofrs)))

To make it easier to work with the data we extract the time-series as pandas tables and interpolate them to a common 1-hour interval index.

import pandas as pd
from ioos_tools.ioos import collector2table
示例#17
0
for rec, item in csw.records.items():
    log.info('{}'.format(item.title))

log.info(fmt(' DAP '))
for url in dap_urls:
    log.info('{}.html'.format(url))

log.info(fmt(' SOS '))
for url in sos_urls:
    log.info('{}'.format(url))

# In[9]:

from pyoos.collectors.coops.coops_sos import CoopsSos

# CO-OPS SOS collector for water level referenced to the NAVD datum.
collector = CoopsSos()
sos_name = 'water_surface_height_above_reference_datum'

datum = 'NAVD'
collector.set_datum(datum)
collector.end_time = stop
collector.start_time = start
collector.variables = [sos_name]

ofrs = collector.server.offerings
title = collector.server.identification.title
log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))

# In[10]:
# Fixed: Python 2 print statement converted to the print() function,
# consistent with the Python 3 code in the rest of this file.
print("\n".join(sos_urls))

# <markdowncell>

# ### SOS Requirements

# <codecell>

# Parse the configured start/stop timestamps and pre-format the ISO-8601
# strings the SOS request expects.
start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(stop_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# <codecell>

# Configure the CO-OPS SOS collector for the winds variable(s).
collector = CoopsSos()
collector.start_time = start_time
collector.end_time = end_time
collector.variables = data_dict["winds"]["sos_name"]
collector.server.identification.title
# Fixed: Python 2 print statements converted to the print() function.
print(collector.start_time, ":", collector.end_time)
ofrs = collector.server.offerings

# <markdowncell>

# ###Find all SOS stations within the bounding box and time extent

# <codecell>

print("Date: ", iso_start, " to ", iso_end)
box_str = ','.join(str(e) for e in bounding_box)
    log.info('{}'.format(item.title))

log.info(fmt(' DAP URLs '))
for url in dap_urls:
    log.info('{}.html'.format(url))

log.info(fmt(' SOS URLs '))
for url in sos_urls:
    log.info('{}'.format(url))


# In[ ]:

from pyoos.collectors.coops.coops_sos import CoopsSos

# Build a CO-OPS SOS collector for water level above the reference datum.
collector = CoopsSos()
sos_name = 'water_surface_height_above_reference_datum'

datum = 'NAVD'
collector.set_datum(datum)
collector.start_time = start
collector.end_time = stop
collector.variables = [sos_name]

# Log how many offerings the server advertises.
title = collector.server.identification.title
ofrs = collector.server.offerings
log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))


# In[ ]:
            st_list[station_name] = {}
            st_list[station_name]["lat"] = lat_data[i]
            st_list[station_name]["source"] = source
            st_list[station_name]["lon"] = lon_data[i]
            print station_name

    print "number of stations in bbox",len(st_list.keys())
    return st_list

# <markdowncell>

# #COOPS Station Locations

# <codecell>

# Configure the CO-OPS SOS collector for the currents variable(s).
coops_collector = CoopsSos()
coops_collector.start_time = start_time
coops_collector.end_time = end_time
coops_collector.variables = data_dict["currents"]["sos_name"]
# No-op expression; in a notebook cell this displayed the server title.
coops_collector.server.identification.title
# Fixed: Python 2 print statements converted to the print() function.
print(coops_collector.start_time, ":", coops_collector.end_time)
ofrs = coops_collector.server.offerings
print(len(ofrs))

# <codecell>

print("Date: ", iso_start, " to ", iso_end)
box_str = ','.join(str(e) for e in bounding_box)
print("Lat/Lon Box: ", box_str)
url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
示例#21
0
# Fixed: Python 2 print statements converted to the print() function.
print("Total SOS:", len(sos_urls))
print("\n".join(sos_urls))

# <markdowncell>

# ###Get most recent observations from NOAA-COOPS stations in bounding box

# <codecell>

# Parse the configured window and pre-format ISO-8601 strings.
start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the Coops collector
collector = CoopsSos()
print(collector.server.identification.title)
collector.variables = data_dict["temp"]["sos_name"]
collector.server.identification.title

# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box,
                 variables=data_dict["temp"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
# Print out all the SOS Data URL endpoints

# <codecell>

# Collect the SOS endpoint URLs advertised in the CSW records.
sos_urls = service_urls(csw.records, service='sos:url')
print("\n".join(sos_urls))

# <markdowncell>

# ## 1. Get observations from SOS
# Here we are using a custom class from pyoos to read the CO-OPS SOS.  This is definitely unsavory, as the whole point of using a standard is avoid the need for custom classes for each service.  Need to examine the consequences of removing this and just going with straight SOS service using OWSLib.

# <codecell>

# CO-OPS SOS collector, restricted to the NAVD datum and the jd_start ..
# jd_stop window for the requested variable.
collector = CoopsSos()

collector.set_datum('NAVD')  # MSL
collector.server.identification.title
collector.start_time = jd_start
collector.end_time = jd_stop
collector.variables = [sos_name]

# <codecell>

ofrs = collector.server.offerings
print(len(ofrs))
# Show a small sample of the offerings list.
for p in ofrs[700:710]:
    print(p)

# <markdowncell>
示例#23
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the pyoos ``CoopsSos`` collector.

    NOTE(review): every test talks to the live NOAA CO-OPS SOS endpoint
    and asserts exact values from its responses, so the suite needs
    network access and will break if the service's metadata or archived
    data change.
    """

    def setUp(self):
        # Fresh collector per test; constructing it contacts the CO-OPS
        # SOS server (capabilities fetch, per pyoos behavior).
        self.c = CoopsSos()

    def test_coops_server_id(self):
        """The server's self-reported identification metadata matches."""
        assert self.c.server.identification.title == "NOAA.NOS.CO-OPS SOS"
        assert self.c.server.identification.service == "OGC:SOS"
        assert self.c.server.identification.version == "1.0.0"
        assert (
            self.c.server.identification.abstract
            == "NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server"
        )
        assert self.c.server.identification.keywords == [
            "Air Temperature",
            "Barometric Pressure",
            "Conductivity",
            "Currents",
            "Datum",
            "Harmonic Constituents",
            "Rain Fall",
            "Relative Humidity",
            "Salinity",
            "Visibility",
            "Water Level",
            "Water Level Predictions",
            "Water Temperature",
            "Winds",
        ]
        assert self.c.server.identification.fees == "NONE"
        assert self.c.server.identification.accessconstraints == "NONE"

    def test_coops_describe_sensor(self):
        """DescribeSensor for one station parses into a SensorML object."""
        self.c.features = ["8454000"]
        response = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"'
        )
        assert isinstance(response[0], SensorML)

    def test_raw_coops_get_observation(self):
        """Raw CSV GetObservation returns the expected first record."""
        self.c.start_time = datetime.strptime("2018-10-07", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2018-10-12", "%Y-%m-%d")
        self.c.features = ["8728690"]
        self.c.variables = [
            "water_surface_height_above_reference_datum"
        ]  # noqa

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        data = list(csv.DictReader(io.StringIO(response)))
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8728690"
        )
        # MLLW is the datum the service reports when none is set here.
        assert data[0]["datum_id"] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]["date_time"] == "2018-10-07T00:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "0.385"
        )
        assert data[0]["vertical_position (m)"] == "1.307"

    def test_raw_coops_get_observation_with_dataType(self):
        """Setting dataType requests the VerifiedHighLow product."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ["8454000"]
        self.c.variables = [
            "http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum"
        ]
        self.c.dataType = "VerifiedHighLow"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"  # noqa
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,1.617,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8454000"
        )
        assert data[0]["datum_id"] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]["date_time"] == "2012-10-01T01:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "1.617"
        )
        assert data[0]["vertical_position (m)"] == "1.064"

    def test_raw_coops_get_observation_with_datum(self):
        """With datum='NAVD' the service reports an EPSG datum URN and
        re-referenced values (compare the MLLW values in the test above)."""
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ["8454000"]
        self.c.variables = [
            "http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum"
        ]
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"  # noqa
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,0.863,urn:ogc:def:datum:epsg::5103,1.818
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert len(data) == 4
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8454000"
        )
        assert data[0]["datum_id"] == "urn:ogc:def:datum:epsg::5103"
        assert data[0]["date_time"] == "2012-10-01T01:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "0.863"
        )
        assert data[0]["vertical_position (m)"] == "1.818"
示例#24
0
# Fixed: Python 2 print statements converted to the print() function.
print("Total SOS:", len(sos_urls))
print("\n".join(sos_urls[0:5]))

# <markdowncell>

# ###Get most recent observations from NOAA COOPS stations in bounding box

# <codecell>

# Parse the configured window and pre-format ISO-8601 strings.
start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the Coops collector
collector = CoopsSos()
print(collector.server.identification.title)
collector.variables = data_dict["temp"]["sos_name"]
collector.server.identification.title

# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box,
                 variables=data_dict["temp"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
def process_nos8661070_data(platform_handle,
                       units_converter,
                       xenia_db,
                       unique_dates):
  """Fetch NOS station 8661070 observations via the CO-OPS SOS collector
  and store them in the xenia observation database.

  platform_handle -- dotted platform identifier; used for logging and as
    the key passed to the database layer.
  units_converter -- not used in this function (kept for call
    compatibility).
  xenia_db -- database wrapper exposing platformExists(),
    buildMinimalPlatform() and addMeasurement(); insert failures are
    logged via its lastErrorMsg attribute rather than raised.
  unique_dates -- iterable of 'YYYY-mm-dd' strings interpreted as
    US/Eastern local dates; each is converted to UTC and queried as the
    24-hour window ending at that instant.

  Returns None.
  """

  logger = logging.getLogger(__name__)
  utc_tz = timezone('UTC')
  eastern_tz= timezone('US/Eastern')
  # Single timestamp stamped on every row inserted during this run.
  row_entry_date = datetime.now()

  platform_name_parts = platform_handle.split('.')
  """
  Create a data collection object.
  Contructor parameters are:
    url - THe SWE endpoint we're interested in
    version - Optional default is '1.0.0' The SWE version the endpoint.
    xml - Optional default is None - The XML response from a GetCapabilities query of the server.
  """
  dataCollector = CoopsSos()
  """
  obs_list = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum',
            'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
            'http://mmisw.org/ont/cf/parameter/wind_speed',
            'http://mmisw.org/ont/cf/parameter/wind_from_direction']
  obs_list = [('water_surface_height_above_reference_datum', 'm'),
             ('sea_water_temperature', 'celsius'),
             ('wind_speed', 'm_s-1'),
              ('wind_from_direction', 'degrees_true')]
  """
  # Mapping from SOS observed-property names to the xenia DB's
  # observation names and units.
  nos_to_xenia = {
    "water_surface_height_above_reference_datum": {
      "units": "m",
      "xenia_name": "water_level",
      "xenia_units": "m"

    },
    "sea_water_temperature": {
      "units": "celsius",
      "xenia_name": "water_temperature",
      "xenia_units": "celsius"
    },
    "wind_speed": {
      "units": "m_s-1",
      "xenia_name": "wind_speed",
      "xenia_units": "m_s-1"

    },
    "wind_from_direction": {
      "units": "degrees_true",
      "xenia_name": "wind_from_direction",
      "xenia_units": "degrees_true"

    }
  }
  #nos_obs = nos_to_xenia.keys()
  # Only water temperature is requested at present; the other mappings
  # above are retained for when more observations are enabled.
  nos_obs = ['sea_water_temperature']
  # -1 from platformExists appears to mean "platform not found" -- TODO
  # confirm against the xenia DB API.
  if xenia_db.platformExists(platform_handle) == -1:
    s_order = 1
    obs_list = []
    # NOTE(review): s_order is never incremented, so every sensor is
    # registered with sort order 1 -- confirm this is intentional.
    for obs_key in nos_to_xenia:
      obs_info = nos_to_xenia[obs_key]
      obs_list.append({'obs_name': obs_info['xenia_name'],
                       'uom_name': obs_info['xenia_units'],
                       's_order': s_order})
    xenia_db.buildMinimalPlatform(platform_handle, obs_list)
  for start_date in unique_dates:
    # Localize the date to US/Eastern, convert to UTC, then query the
    # preceding 24 hours: [utc_start_date - 24h, utc_start_date].
    utc_start_date = (eastern_tz.localize(datetime.strptime(start_date, '%Y-%m-%d'))).astimezone(utc_tz)
    start_date = utc_start_date - timedelta(hours=24)
    logger.debug("Platform: %s Begin Date: %s End Date: %s" % (platform_handle, start_date, utc_start_date))
    for single_obs in nos_obs:
      obs_type = nos_to_xenia[single_obs]['xenia_name']
      uom_type = nos_to_xenia[single_obs]['xenia_units']
      s_order = 1
      dataCollector.filter(features=['8661070'],
                           variables=[single_obs],
                           start=start_date,
                           end=utc_start_date)
      try:
        response = dataCollector.raw(responseFormat="text/csv")
      except Exception as e:
        # Best-effort: log the failed request and continue with the next
        # observation/date instead of aborting the whole run.
        logger.exception(e)
      else:
        csv_reader = csv.reader(response.split('\n'), delimiter=',')
        line_cnt = 0
        for row in csv_reader:
          # Row 0 is the CSV header; empty rows are skipped.
          if line_cnt > 0 and len(row):
            # Column layout assumed from usage: row[2]=latitude,
            # row[3]=longitude, row[4]=date_time, row[5]=value -- TODO
            # confirm against the CO-OPS text/csv response format.
            obs_date = datetime.strptime(row[4], '%Y-%m-%dT%H:%M:%SZ')
            obs_val = float(row[5])
            logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                         (single_obs, uom_type, obs_date, obs_val, s_order))
            if not xenia_db.addMeasurement(obs_type,
                                    uom_type,
                                    platform_handle,
                                    obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                    float(row[2]),
                                    float(row[3]),
                                    0,
                                    [obs_val],
                                    sOrder=s_order,
                                    autoCommit=True,
                                    rowEntryDate=row_entry_date ):
              logger.error(xenia_db.lastErrorMsg)

          line_cnt += 1
示例#26
0
# <markdowncell>

# ### SOS Requirements
# #### Use Pyoos SOS collector to obtain Observation data from COOPS.

# <codecell>

# Parse the configured window and pre-format ISO-8601 strings.
start_time = datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = datetime.strptime(end_date, '%Y-%m-%d %H:%M')

# <codecell>

iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# CO-OPS SOS collector for the water variable, referenced to NAVD.
collector = CoopsSos()
collector.set_datum('NAVD')
collector.server.identification.title
collector.start_time = start_time
collector.end_time = end_time
collector.variables = [data_dict["water"]["sos_name"]]

# <codecell>

print("Date: %s to %s" % (iso_start, iso_end))
box_str = ','.join(str(e) for e in box)
print("Lat/Lon Box: %s" % box_str)

# <codecell>

# Grab the opendap sos url and use it for the service.
示例#27
0
            st_list[station_name]["lat"] = lat_data[i]
            st_list[station_name]["source"] = source
            st_list[station_name]["lon"] = lon_data[i]
            print station_name

    print "number of stations in bbox", len(st_list.keys())
    return st_list


# <markdowncell>

# #COOPS Station Locations

# <codecell>

# Configure the CO-OPS SOS collector for the currents variable(s).
coops_collector = CoopsSos()
coops_collector.start_time = start_time
coops_collector.end_time = end_time
coops_collector.variables = data_dict["currents"]["sos_name"]
# No-op expression; in a notebook cell this displayed the server title.
coops_collector.server.identification.title
# Fixed: Python 2 print statements converted to the print() function.
print(coops_collector.start_time, ":", coops_collector.end_time)
ofrs = coops_collector.server.offerings
print(len(ofrs))

# <codecell>

print("Date: ", iso_start, " to ", iso_end)
box_str = ','.join(str(e) for e in bounding_box)
print("Lat/Lon Box: ", box_str)
url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
示例#28
0
def ind2ij(a, index):
    """Convert a flat (raveled) index into 2-D indices for array ``a``.

    Returns ``(i, j)`` -- column and row -- such that, for a C-ordered
    2-D array, ``a[j, i] == a.ravel()[index]``.  ``index`` may also be an
    integer array, in which case arrays of columns/rows are returned.

    Fixes in this revision:
      * the shape is taken from the argument ``a`` (the original read a
        global ``lon`` and ignored its parameter);
      * the row uses floor division -- the original ``ceil(index/m)``
        returned the wrong row for every index that is not an exact
        multiple of the row length ``m`` (e.g. index m-1 gave row 1
        instead of row 0).
    """
    n, m = a.shape
    j = index // m  # row: number of full rows preceding this element
    i = index % m   # column: offset within the row
    return i, j

# <markdowncell>

# ## 1. Get observations from SOS

# <codecell>

# CO-OPS SOS collector used for the observation queries below.
collector = CoopsSos()

# <codecell>

# No-op expression; in a notebook cell this displays the server title.
collector.server.identification.title

# <codecell>

# Constrain the collector to the jd_start .. jd_stop window and the
# requested variable.
collector.start_time = jd_start
collector.end_time = jd_stop
collector.variables = [sos_name]

# <codecell>

ofrs = collector.server.offerings
示例#29
0
 def setUp(self):
     self.c = CoopsSos()
def process_nos8661070_data(platform_handle, units_converter, xenia_db,
                            unique_dates):

    logger = logging.getLogger(__name__)
    utc_tz = timezone('UTC')
    eastern_tz = timezone('US/Eastern')
    row_entry_date = datetime.now()

    platform_name_parts = platform_handle.split('.')
    """
  Create a data collection object.
  Contructor parameters are:
    url - THe SWE endpoint we're interested in
    version - Optional default is '1.0.0' The SWE version the endpoint.
    xml - Optional default is None - The XML response from a GetCapabilities query of the server.
  """
    dataCollector = CoopsSos()
    """
  obs_list = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum',
            'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
            'http://mmisw.org/ont/cf/parameter/wind_speed',
            'http://mmisw.org/ont/cf/parameter/wind_from_direction']
  obs_list = [('water_surface_height_above_reference_datum', 'm'),
             ('sea_water_temperature', 'celsius'),
             ('wind_speed', 'm_s-1'),
              ('wind_from_direction', 'degrees_true')]
  """
    nos_to_xenia = {
        "water_surface_height_above_reference_datum": {
            "units": "m",
            "xenia_name": "water_level",
            "xenia_units": "m"
        },
        "sea_water_temperature": {
            "units": "celsius",
            "xenia_name": "water_temperature",
            "xenia_units": "celsius"
        },
        "wind_speed": {
            "units": "m_s-1",
            "xenia_name": "wind_speed",
            "xenia_units": "m_s-1"
        },
        "wind_from_direction": {
            "units": "degrees_true",
            "xenia_name": "wind_from_direction",
            "xenia_units": "degrees_true"
        }
    }
    #nos_obs = nos_to_xenia.keys()
    nos_obs = ['sea_water_temperature']
    if xenia_db.platformExists(platform_handle) == -1:
        s_order = 1
        obs_list = []
        for obs_key in nos_to_xenia:
            obs_info = nos_to_xenia[obs_key]
            obs_list.append({
                'obs_name': obs_info['xenia_name'],
                'uom_name': obs_info['xenia_units'],
                's_order': s_order
            })
        xenia_db.buildMinimalPlatform(platform_handle, obs_list)
    for start_date in unique_dates:
        utc_start_date = (eastern_tz.localize(
            datetime.strptime(start_date, '%Y-%m-%d'))).astimezone(utc_tz)
        start_date = utc_start_date - timedelta(hours=24)
        logger.debug("Platform: %s Begin Date: %s End Date: %s" %
                     (platform_handle, start_date, utc_start_date))
        for single_obs in nos_obs:
            obs_type = nos_to_xenia[single_obs]['xenia_name']
            uom_type = nos_to_xenia[single_obs]['xenia_units']
            s_order = 1
            dataCollector.filter(features=['8661070'],
                                 variables=[single_obs],
                                 start=start_date,
                                 end=utc_start_date)
            try:
                response = dataCollector.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
            else:
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                for row in csv_reader:
                    if line_cnt > 0 and len(row):
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (single_obs, uom_type, obs_date, obs_val, s_order))
                        if not xenia_db.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(xenia_db.lastErrorMsg)

                    line_cnt += 1