Example #1
0
class CoopsSosTest(unittest.TestCase):
    """Exercise the CoopsSos collector against the live NOAA CO-OPS SOS.

    These are integration tests: they need network access to the public
    CO-OPS endpoint, so failures can also mean service unavailability.
    """

    def setUp(self):
        # Build a fresh collector for every test so filter state set by
        # one test cannot leak into another.
        self.c = CoopsSos()

    def test_coops_server_id(self):
        # Service identification metadata comes from GetCapabilities.
        ident = self.c.server.identification
        assert ident.title == "NOAA.NOS.CO-OPS SOS"
        assert ident.service == 'OGC:SOS'
        assert ident.version == '1.0.0'
        assert ident.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        expected_keywords = [
            'Air Temperature',
            'Barometric Pressure',
            'Conductivity',
            'Currents',
            'Datum',
            'Harmonic Constituents',
            'Rain Fall',
            'Relative Humidity',
            'Salinity',
            'Visibility',
            'Water Level',
            'Water Level Predictions',
            'Water Temperature',
            'Winds',
        ]
        assert ident.keywords == expected_keywords
        assert ident.fees == 'NONE'
        assert ident.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        # DescribeSensor for one station should parse into a SensorML doc.
        self.c.features = ['8454000']
        ioos_fmt = 'text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"'
        response = self.c.metadata(output_format=ioos_fmt)
        assert isinstance(response[0], SensorML)

    def _fetch_csv_rows(self):
        # Helper: run the currently-configured query as text/csv and parse
        # the response into a list of row dicts.
        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        return list(csv.DictReader(io.StringIO(response)))

    def test_raw_coops_get_observation(self):
        # One day of water level data for station 8454000.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']  # noqa

        rows = self._fetch_csv_rows()
        expected = {
            'station_id': 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000',
            'datum_id': "urn:ioos:def:datum:noaa::MLLW",
            'date_time': "2012-10-01T00:00:00Z",
            'water_surface_height_above_reference_datum (m)': "1.465",
            'vertical_position (m)': "1.064",
        }
        for column, value in expected.items():
            assert rows[0][column] == value

    def test_raw_coops_get_observation_with_dataType(self):
        # Same query restricted to the VerifiedHighLow data type.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']  # noqa
        self.c.dataType = "VerifiedHighLow"

        rows = self._fetch_csv_rows()
        expected = {
            'station_id': 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000',
            'datum_id': "urn:ioos:def:datum:noaa::MLLW",
            'date_time': "2012-10-01T01:00:00Z",
            'water_surface_height_above_reference_datum (m)': "1.617",
            'vertical_position (m)': "1.064",
        }
        for column, value in expected.items():
            assert rows[0][column] == value

    def test_raw_coops_get_observation_with_datum(self):
        # Same query again, but reported against the NAVD datum.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum']  # noqa
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        rows = self._fetch_csv_rows()
        # Four verified high/low readings fall inside the one-day window.
        assert len(rows) == 4
        expected = {
            'station_id': 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000',
            'datum_id': "urn:ogc:def:datum:epsg::5103",
            'date_time': "2012-10-01T01:00:00Z",
            'water_surface_height_above_reference_datum (m)': "0.863",
            'vertical_position (m)': "1.818",
        }
        for column, value in expected.items():
            assert rows[0][column] == value
Example #2
0
class CoopsSosTest(unittest.TestCase):
    """Integration tests for the CoopsSos collector (live NOAA CO-OPS SOS).

    NOTE(review): these tests talk to the public CO-OPS endpoint over the
    network; failures may reflect service availability rather than bugs.
    """

    def setUp(self):
        # Fresh collector per test so filter state cannot leak between tests.
        self.c = CoopsSos()

    def test_coops_server_id(self):
        # Service identification metadata from GetCapabilities.
        assert self.c.server.identification.title == "NOAA.NOS.CO-OPS SOS"
        assert self.c.server.identification.service == "OGC:SOS"
        assert self.c.server.identification.version == "1.0.0"
        assert (
            self.c.server.identification.abstract
            == "NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server"
        )
        assert self.c.server.identification.keywords == [
            "Air Temperature",
            "Barometric Pressure",
            "Conductivity",
            "Currents",
            "Datum",
            "Harmonic Constituents",
            "Rain Fall",
            "Relative Humidity",
            "Salinity",
            "Visibility",
            "Water Level",
            "Water Level Predictions",
            "Water Temperature",
            "Winds",
        ]
        assert self.c.server.identification.fees == "NONE"
        assert self.c.server.identification.accessconstraints == "NONE"

    def test_coops_describe_sensor(self):
        # DescribeSensor for one station should parse into a SensorML doc.
        self.c.features = ["8454000"]
        response = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"'
        )
        assert isinstance(response[0], SensorML)

    def test_raw_coops_get_observation(self):
        # Five days of water level data for station 8728690 as text/csv.
        self.c.start_time = datetime.strptime("2018-10-07", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2018-10-12", "%Y-%m-%d")
        self.c.features = ["8728690"]
        self.c.variables = [
            "water_surface_height_above_reference_datum"
        ]  # noqa

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        # Spot-check the first parsed CSV data row.
        data = list(csv.DictReader(io.StringIO(response)))
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8728690"
        )
        assert data[0]["datum_id"] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]["date_time"] == "2018-10-07T00:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "0.385"
        )
        assert data[0]["vertical_position (m)"] == "1.307"

    def test_raw_coops_get_observation_with_dataType(self):
        # Same style of query restricted to the VerifiedHighLow data type.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ["8454000"]
        self.c.variables = [
            "http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum"
        ]
        self.c.dataType = "VerifiedHighLow"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"  # noqa
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,1.617,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8454000"
        )
        assert data[0]["datum_id"] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]["date_time"] == "2012-10-01T01:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "1.617"
        )
        assert data[0]["vertical_position (m)"] == "1.064"

    def test_raw_coops_get_observation_with_datum(self):
        # VerifiedHighLow again, but reported against the NAVD datum.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ["8454000"]
        self.c.variables = [
            "http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum"
        ]
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"  # noqa
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,0.863,urn:ogc:def:datum:epsg::5103,1.818
        """
        data = list(csv.DictReader(io.StringIO(response)))
        assert len(data) == 4
        assert (
            data[0]["station_id"] == "urn:ioos:station:NOAA.NOS.CO-OPS:8454000"
        )
        assert data[0]["datum_id"] == "urn:ogc:def:datum:epsg::5103"
        assert data[0]["date_time"] == "2012-10-01T01:00:00Z"
        assert (
            data[0]["water_surface_height_above_reference_datum (m)"]
            == "0.863"
        )
        assert data[0]["vertical_position (m)"] == "1.818"
def process_nos8661070_data(platform_handle,
                       units_converter,
                       xenia_db,
                       unique_dates):
  """Fetch CO-OPS SOS observations for station 8661070 and store them.

  For each date in *unique_dates* (assumed 'YYYY-MM-DD' strings in
  US/Eastern — TODO confirm with callers), queries the preceding 24 hours
  of each configured observation and writes the CSV rows into *xenia_db*.

  :param platform_handle: dotted platform id, e.g. 'nos.8661070.WL'.
  :param units_converter: unused here; kept for interface compatibility.
  :param xenia_db: xenia database wrapper (platformExists,
      buildMinimalPlatform, addMeasurement, lastErrorMsg).
  :param unique_dates: iterable of date strings to backfill.
  """
  logger = logging.getLogger(__name__)
  utc_tz = timezone('UTC')
  eastern_tz= timezone('US/Eastern')
  row_entry_date = datetime.now()

  # NOTE(review): platform_name_parts is never used below.
  platform_name_parts = platform_handle.split('.')
  """
  Create a data collection object.
  Contructor parameters are:
    url - THe SWE endpoint we're interested in
    version - Optional default is '1.0.0' The SWE version the endpoint.
    xml - Optional default is None - The XML response from a GetCapabilities query of the server.
  """
  dataCollector = CoopsSos()
  """
  obs_list = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum',
            'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
            'http://mmisw.org/ont/cf/parameter/wind_speed',
            'http://mmisw.org/ont/cf/parameter/wind_from_direction']
  obs_list = [('water_surface_height_above_reference_datum', 'm'),
             ('sea_water_temperature', 'celsius'),
             ('wind_speed', 'm_s-1'),
              ('wind_from_direction', 'degrees_true')]
  """
  # Mapping from SOS observation names to xenia obs names/units.
  nos_to_xenia = {
    "water_surface_height_above_reference_datum": {
      "units": "m",
      "xenia_name": "water_level",
      "xenia_units": "m"

    },
    "sea_water_temperature": {
      "units": "celsius",
      "xenia_name": "water_temperature",
      "xenia_units": "celsius"
    },
    "wind_speed": {
      "units": "m_s-1",
      "xenia_name": "wind_speed",
      "xenia_units": "m_s-1"

    },
    "wind_from_direction": {
      "units": "degrees_true",
      "xenia_name": "wind_from_direction",
      "xenia_units": "degrees_true"

    }
  }
  #nos_obs = nos_to_xenia.keys()
  # Only water temperature is currently pulled; nos_to_xenia lists the
  # full set of candidates.
  nos_obs = ['sea_water_temperature']
  # First sighting of this platform: register it with a minimal sensor list.
  if xenia_db.platformExists(platform_handle) == -1:
    s_order = 1
    obs_list = []
    for obs_key in nos_to_xenia:
      obs_info = nos_to_xenia[obs_key]
      # NOTE(review): s_order is never incremented, so every obs gets
      # s_order=1 — confirm that is intended.
      obs_list.append({'obs_name': obs_info['xenia_name'],
                       'uom_name': obs_info['xenia_units'],
                       's_order': s_order})
    xenia_db.buildMinimalPlatform(platform_handle, obs_list)
  for start_date in unique_dates:
    # Localize the Eastern date, convert to UTC, and query the 24 hours
    # leading up to it.  (start_date is rebound here, shadowing the loop
    # variable.)
    utc_start_date = (eastern_tz.localize(datetime.strptime(start_date, '%Y-%m-%d'))).astimezone(utc_tz)
    start_date = utc_start_date - timedelta(hours=24)
    logger.debug("Platform: %s Begin Date: %s End Date: %s" % (platform_handle, start_date, utc_start_date))
    for single_obs in nos_obs:
      obs_type = nos_to_xenia[single_obs]['xenia_name']
      uom_type = nos_to_xenia[single_obs]['xenia_units']
      s_order = 1
      dataCollector.filter(features=['8661070'],
                           variables=[single_obs],
                           start=start_date,
                           end=utc_start_date)
      try:
        response = dataCollector.raw(responseFormat="text/csv")
      except Exception as e:
        logger.exception(e)
      else:
        # NOTE(review): raw() may return bytes under Python 3; this
        # str.split assumes a text response — confirm the runtime.
        csv_reader = csv.reader(response.split('\n'), delimiter=',')
        line_cnt = 0
        for row in csv_reader:
          # Skip the header row (line_cnt == 0) and blank trailing lines.
          if line_cnt > 0 and len(row):
            # Fixed CSV layout: col 2/3 = lat/lon, col 4 = timestamp,
            # col 5 = observed value.
            obs_date = datetime.strptime(row[4], '%Y-%m-%dT%H:%M:%SZ')
            obs_val = float(row[5])
            logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                         (single_obs, uom_type, obs_date, obs_val, s_order))
            if not xenia_db.addMeasurement(obs_type,
                                    uom_type,
                                    platform_handle,
                                    obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                    float(row[2]),
                                    float(row[3]),
                                    0,
                                    [obs_val],
                                    sOrder=s_order,
                                    autoCommit=True,
                                    rowEntryDate=row_entry_date ):
              logger.error(xenia_db.lastErrorMsg)

          line_cnt += 1
Example #4
0
        # NOTE(review): fragment of a larger function (variables_to_query,
        # collector, sos, sos_dfs are defined outside this view).  Python 2
        # print statements — this block predates Python 3.
        for v in variables_to_query:
            collector.filter(variables=[v])
            collector.filter(bbox=bounding_box)
            new_start = copy(start_date)
            new_end   = copy(start_date)

            # Hold dataframe for periodic concat
            v_frame = None

            # Walk the requested range in one-day windows; each request
            # covers at most 24 hours.
            while new_end < end_date:
                new_end = min(end_date, new_start + timedelta(days=1))
                collector.filter(start=new_start)
                collector.filter(end=new_end)
                try:
                    print "Collecting from {!s}: ({!s} -> {!s})".format(sos, new_start, new_end)
                    data = collector.raw()
                    new_frame = pd.DataFrame.from_csv(StringIO(data))
                    new_frame = new_frame.reset_index()
                    if v_frame is None:
                        v_frame = new_frame
                    else:
                        # drop_duplicates guards against rows repeated
                        # across chunk boundaries.
                        v_frame = pd.concat([v_frame, new_frame])
                        v_frame = v_frame.drop_duplicates()
                except ExceptionReport as e:
                    print "  [-] Error obtaining {!s} from {!s} - Message from server: {!s}".format(v, sos, e)
                    continue
                finally:
                    # Advance the window even on failure so the loop terminates.
                    new_start = new_end

            # Keep only variables for which at least one chunk succeeded.
            if v_frame is not None:
                sos_dfs.append(v_frame)
Example #5
0
    def get_nos_data(self, site, observations, begin_date, units_coverter,
                     db_obj):
        """Pull station observations from the CO-OPS SOS and save them.

        Queries the 24 hours ending at *begin_date* (converted to UTC) for
        every observation configured for *site*, parses each text/csv
        response, and writes one multi_obs record per data row.

        :param site: NOS station id, used in the query and in the
            'nos.<site>.met' platform handle.
        :param observations: obs-setup dicts with 'sites', 'sos_obs_query'
            and a 'xenia_obs' list ({'xenia_name', 'xenia_units',
            'sos_obs_name', ...}); 'm_type_id'/'sensor_id' are filled in
            here as a side effect.
        :param begin_date: timezone-aware datetime; end of the query window.
        :param units_coverter: unused; kept for call compatibility.
        :param db_obj: xenia DB wrapper (platformExists, mTypeExists,
            sensorExists, buildMinimalPlatform, addRec).
        """
        start_time = time.time()
        logger = logging.getLogger(self.__class__.__name__)
        logger.debug("Starting get_nos_data")

        row_entry_date = datetime.now()
        utc_tz = timezone('UTC')

        platform_handle = 'nos.%s.met' % (site)
        # First sighting of this platform: register it with a minimal
        # sensor list so measurements have somewhere to land.
        if db_obj.platformExists(platform_handle) is None:
            obs_list = []
            for obs_setup in observations:
                if site in obs_setup['sites']:
                    for xenia_obs in obs_setup['xenia_obs']:
                        obs_list.append({
                            'obs_name': xenia_obs['xenia_name'],
                            'uom_name': xenia_obs['xenia_units'],
                            's_order': 1
                        })
            db_obj.buildMinimalPlatform(platform_handle, obs_list)
        # Build sensor_id and m_type_id list.
        for obs_setup in observations:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    m_type_id = db_obj.mTypeExists(xenia_obs['xenia_name'],
                                                   xenia_obs['xenia_units'])
                    sensor_id = db_obj.sensorExists(xenia_obs['xenia_name'],
                                                    xenia_obs['xenia_units'],
                                                    platform_handle, 1)
                    xenia_obs['m_type_id'] = m_type_id
                    xenia_obs['sensor_id'] = sensor_id

        sos_query = CoopsSos()

        logger.debug("Query site: %s for date: %s" % (site, begin_date))
        sos_query.clear()
        # Query window: the 24 hours leading up to begin_date, in UTC.
        utc_end_date = begin_date.astimezone(utc_tz)
        start_date = begin_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in observations:
            if site in obs_setup['sites']:
                # Column indexes are discovered from the CSV header row.
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None
                # Fix: default coordinates.  Previously latitude/longitude
                # were only assigned when the header contained a
                # latitude/longitude column, so a response without them
                # raised NameError at the multi_obs(...) call below.
                latitude = 0.0
                longitude = 0.0

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    logger.exception(e)
                else:
                    csv_reader = csv.reader(
                        response.decode('utf-8').split('\n'), delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            if line_cnt > 0 and len(row):
                                # Data row: convert fields and store one
                                # record, falling back to 0 on bad values.
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    obs_val = 0.0
                                try:
                                    if depth_ndx is not None:
                                        depth = float(row[depth_ndx])
                                    else:
                                        depth = 0
                                except ValueError as e:
                                    logger.exception(e)
                                    depth = 0
                                try:
                                    if lat_ndx is not None:
                                        latitude = float(row[lat_ndx])
                                    if lon_ndx is not None:
                                        longitude = float(row[lon_ndx])
                                except ValueError as e:
                                    logger.exception(e)
                                    latitude = 0.0
                                    longitude = 0.0

                                obs_rec = multi_obs(
                                    row_entry_date=row_entry_date,
                                    platform_handle=platform_handle,
                                    sensor_id=xenia_obs_setup['sensor_id'],
                                    m_type_id=xenia_obs_setup['m_type_id'],
                                    m_date=obs_date.strftime(
                                        '%Y-%m-%dT%H:%M:%S'),
                                    m_lon=longitude,
                                    m_lat=latitude,
                                    m_z=depth,
                                    m_value=obs_val,
                                )

                                rec_id = db_obj.addRec(obs_rec, True)
                                if rec_id is not None:
                                    logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                                else:
                                    logger.error("Failed adding obs: %s(%s) Date: %s Value: %s S_Order: %d" % \
                                                 (obs_type, uom_type, obs_date, obs_val, s_order))
                            else:
                                # Header row: locate the columns we need by
                                # substring match on the column names.
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1

        logger.debug("Finished get_nos_data in %f seconds" %
                     (time.time() - start_time))

        return
Example #6
0
# NOTE(review): notebook cell, Python 2 (print statement).  start_date,
# end_date, data_dict, bounding_box come from earlier cells not shown here.
# Parse the configured date range and build ISO-8601 strings.
start_time = dt.datetime.strptime(start_date,'%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date,'%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the Coops collector
collector = CoopsSos()
print collector.server.identification.title
collector.variables = data_dict["temp"]["sos_name"]
# Bare expression: displays the title when run in a notebook; no-op otherwise.
collector.server.identification.title

# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box,
                 variables=data_dict["temp"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
# Load the CSV response into a DataFrame indexed by observation time.
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
collector.end_time = end_time

ofrs = collector.server.offerings

# <codecell>

obs_loc_df.head()

# <codecell>
Example #7
0
# NOTE(review): Python 2 notebook cell; start_date, end_date, data_dict and
# bounding_box are defined in cells outside this view.
# Convert the configured date range to datetimes and ISO-8601 strings.
start_time = dt.datetime.strptime(start_date,'%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date,'%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the Coops collector
collector = CoopsSos()
print collector.server.identification.title
collector.variables = data_dict["temp"]["sos_name"]
# Bare expression: only useful as notebook cell output.
collector.server.identification.title

# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box,
                 variables=data_dict["temp"]["sos_name"])

response = collector.raw(responseFormat="text/csv")
# Read the CSV payload into a time-indexed DataFrame.
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times
collector.start_time = start_time
collector.end_time = end_time

ofrs = collector.server.offerings

# <codecell>

obs_loc_df.head()

# <codecell>
def get_nos_data(site, dates, units_coverter, db_obj):
    """Query the CO-OPS SOS for *site* around each date and store the rows.

    For every date in *dates* (timezone-aware), fetches a 48-hour window
    (±24h) of each observation in the module-level nos_obs mapping and
    writes the parsed CSV rows via db_obj.addMeasurement.

    NOTE(review): relies on a module-level nos_obs mapping that is not
    visible in this chunk — verify its shape ({'<sos name>':
    {'xenia_name', 'xenia_units'}}).

    :param site: NOS station id; also used in the 'nos.<site>.met' handle.
    :param dates: iterable of timezone-aware datetimes to backfill.
    :param units_coverter: unused here; kept for call compatibility.
    :param db_obj: xenia DB wrapper (platformExists, buildMinimalPlatform,
        addMeasurement, lastErrorMsg).
    """
    start_time = time.time()
    logger = logging.getLogger(__name__)
    logger.debug("Starting get_nos_data")

    row_entry_date = datetime.now()
    utc_tz = timezone('UTC')
    eastern_tz = timezone('US/Eastern')

    platform_handle = 'nos.%s.met' % (site)
    # First sighting of this platform: register a minimal sensor list.
    if db_obj.platformExists(platform_handle) == -1:
        obs_list = []
        for single_obs in nos_obs:
            obs_list.append({
                'obs_name': nos_obs[single_obs]['xenia_name'],
                'uom_name': nos_obs[single_obs]['xenia_units'],
                's_order': 1
            })
        db_obj.buildMinimalPlatform(platform_handle, obs_list)

    nos_query = CoopsSos()
    #dates.sort(reverse=True)
    for rec_date in dates:
        logger.debug("Query site: %s for date: %s" % (site, rec_date))
        nos_query.clear()
        # 48-hour window centered on rec_date, expressed in UTC.
        utc_end_date = rec_date.astimezone(utc_tz) + timedelta(hours=24)
        start_date = rec_date.astimezone(utc_tz) - timedelta(hours=24)

        for single_obs in nos_obs:
            obs_type = nos_obs[single_obs]['xenia_name']
            uom_type = nos_obs[single_obs]['xenia_units']
            s_order = 1

            nos_query.filter(features=[site],
                             start=start_date,
                             end=utc_end_date,
                             variables=[single_obs])
            try:
                #results = nos_query.collect()
                response = nos_query.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
            else:
                # NOTE(review): raw() may return bytes under Python 3; this
                # str.split assumes a text response — confirm the runtime.
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                for row in csv_reader:
                    # Skip the header (line_cnt == 0) and blank trailing rows.
                    if line_cnt > 0 and len(row):
                        # Fixed CSV layout: col 2/3 = lat/lon, col 4 =
                        # timestamp, col 5 = observed value.
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (obs_type, uom_type, obs_date, obs_val, s_order))

                        if not db_obj.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(db_obj.lastErrorMsg)

                    line_cnt += 1

    logger.debug("Finished get_nos_data in %f seconds" %
                 (time.time() - start_time))

    return
Example #9
0
class CoopsSosTest(unittest.TestCase):
    """Python 2 integration tests for the CoopsSos collector.

    NOTE(review): uses basestring and StringIO.StringIO, so this class
    will not run under Python 3.  Tests hit the live NOAA CO-OPS SOS over
    the network.
    """

    def setUp(self):
        # Fresh collector per test so filter state cannot leak between tests.
        self.c = CoopsSos()

    def test_coops_server_id(self):
        # Service identification metadata from GetCapabilities.
        assert self.c.server.identification.title == "NOAA.NOS.CO-OPS SOS"
        assert self.c.server.identification.service == 'OGC:SOS'
        assert self.c.server.identification.version == '1.0.0'
        assert self.c.server.identification.abstract == 'NOAA.NOS.CO-OPS Sensor Observation Service (SOS) Server'
        assert self.c.server.identification.keywords == [
            'Air Temperature', 'Barometric Pressure', 'Conductivity',
            'Currents', 'Datums', 'Rain Fall', 'Relative Humidity',
            'Harmonic Constituents', 'Salinity', 'Visibility', 'Water Level',
            'Water Level Predictions', 'Water Temperature', 'Winds'
        ]
        assert self.c.server.identification.fees == 'NONE'
        assert self.c.server.identification.accessconstraints == 'NONE'

    def test_coops_describe_sensor(self):
        # DescribeSensor for one station should parse into a SensorML doc.
        self.c.features = ['8454000']
        response = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1"')
        assert isinstance(response[0], SensorML)

    def test_raw_coops_get_observation(self):
        # One day of water level data for station 8454000 as text/csv.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:A1,41.8071,-71.4012,2012-10-01T00:00:00Z,1.465,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T00:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "1.465"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_dataType(self):
        # Same query restricted to the VerifiedHighLow data type.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]
        self.c.dataType = "VerifiedHighLow"

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,1.617,urn:ioos:def:datum:noaa::MLLW,1.064
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ioos:def:datum:noaa::MLLW"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "1.617"
        assert data[0]['vertical_position (m)'] == "1.064"

    def test_raw_coops_get_observation_with_datum(self):
        # VerifiedHighLow again, reported against the NAVD datum.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['8454000']
        self.c.variables = [
            'http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum'
        ]
        self.c.dataType = "VerifiedHighLow"
        self.c.datum = "NAVD"

        response = self.c.raw(responseFormat="text/csv")
        assert isinstance(response, basestring)
        """
        station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"water_surface_height_above_reference_datum (m)",datum_id,"vertical_position (m)"
        urn:ioos:station:NOAA.NOS.CO-OPS:8454000,urn:ioos:sensor:NOAA.NOS.CO-OPS:8454000:W3,41.8071,-71.4012,2012-10-01T01:00:00Z,0.863,urn:ogc:def:datum:epsg::5103,1.818
        """
        data = list(csv.DictReader(StringIO.StringIO(response)))
        # Four verified high/low readings fall inside the one-day window.
        assert len(data) == 4
        assert data[0][
            'station_id'] == 'urn:ioos:station:NOAA.NOS.CO-OPS:8454000'
        assert data[0]['datum_id'] == "urn:ogc:def:datum:epsg::5103"
        assert data[0]['date_time'] == "2012-10-01T01:00:00Z"
        assert data[0][
            'water_surface_height_above_reference_datum (m)'] == "0.863"
        assert data[0]['vertical_position (m)'] == "1.818"
def process_nos8661070_data(platform_handle, units_converter, xenia_db,
                            unique_dates):

    logger = logging.getLogger(__name__)
    utc_tz = timezone('UTC')
    eastern_tz = timezone('US/Eastern')
    row_entry_date = datetime.now()

    platform_name_parts = platform_handle.split('.')
    """
  Create a data collection object.
  Contructor parameters are:
    url - THe SWE endpoint we're interested in
    version - Optional default is '1.0.0' The SWE version the endpoint.
    xml - Optional default is None - The XML response from a GetCapabilities query of the server.
  """
    dataCollector = CoopsSos()
    """
  obs_list = ['http://mmisw.org/ont/cf/parameter/water_surface_height_above_reference_datum',
            'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
            'http://mmisw.org/ont/cf/parameter/wind_speed',
            'http://mmisw.org/ont/cf/parameter/wind_from_direction']
  obs_list = [('water_surface_height_above_reference_datum', 'm'),
             ('sea_water_temperature', 'celsius'),
             ('wind_speed', 'm_s-1'),
              ('wind_from_direction', 'degrees_true')]
  """
    nos_to_xenia = {
        "water_surface_height_above_reference_datum": {
            "units": "m",
            "xenia_name": "water_level",
            "xenia_units": "m"
        },
        "sea_water_temperature": {
            "units": "celsius",
            "xenia_name": "water_temperature",
            "xenia_units": "celsius"
        },
        "wind_speed": {
            "units": "m_s-1",
            "xenia_name": "wind_speed",
            "xenia_units": "m_s-1"
        },
        "wind_from_direction": {
            "units": "degrees_true",
            "xenia_name": "wind_from_direction",
            "xenia_units": "degrees_true"
        }
    }
    #nos_obs = nos_to_xenia.keys()
    nos_obs = ['sea_water_temperature']
    if xenia_db.platformExists(platform_handle) == -1:
        s_order = 1
        obs_list = []
        for obs_key in nos_to_xenia:
            obs_info = nos_to_xenia[obs_key]
            obs_list.append({
                'obs_name': obs_info['xenia_name'],
                'uom_name': obs_info['xenia_units'],
                's_order': s_order
            })
        xenia_db.buildMinimalPlatform(platform_handle, obs_list)
    for start_date in unique_dates:
        utc_start_date = (eastern_tz.localize(
            datetime.strptime(start_date, '%Y-%m-%d'))).astimezone(utc_tz)
        start_date = utc_start_date - timedelta(hours=24)
        logger.debug("Platform: %s Begin Date: %s End Date: %s" %
                     (platform_handle, start_date, utc_start_date))
        for single_obs in nos_obs:
            obs_type = nos_to_xenia[single_obs]['xenia_name']
            uom_type = nos_to_xenia[single_obs]['xenia_units']
            s_order = 1
            dataCollector.filter(features=['8661070'],
                                 variables=[single_obs],
                                 start=start_date,
                                 end=utc_start_date)
            try:
                response = dataCollector.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
            else:
                csv_reader = csv.reader(response.split('\n'), delimiter=',')
                line_cnt = 0
                for row in csv_reader:
                    if line_cnt > 0 and len(row):
                        obs_date = datetime.strptime(row[4],
                                                     '%Y-%m-%dT%H:%M:%SZ')
                        obs_val = float(row[5])
                        logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                     (single_obs, uom_type, obs_date, obs_val, s_order))
                        if not xenia_db.addMeasurement(
                                obs_type,
                                uom_type,
                                platform_handle,
                                obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                float(row[2]),
                                float(row[3]),
                                0, [obs_val],
                                sOrder=s_order,
                                autoCommit=True,
                                rowEntryDate=row_entry_date):
                            logger.error(xenia_db.lastErrorMsg)

                    line_cnt += 1