Ejemplo n.º 1
0
class NdbcSosTest(unittest.TestCase):
    """Integration tests for the NDBC SOS collector (requires live network access)."""

    def setUp(self):
        # Fresh collector for every test; construction contacts the service.
        self.c = NdbcSos()

    def test_ndbc_server_id(self):
        # Verify the GetCapabilities service-identification metadata.
        ident = self.c.server.identification
        assert ident.title == "National Data Buoy Center SOS"
        assert ident.service == 'OGC:SOS'
        assert ident.version == '1.0.0'
        assert ident.abstract == 'National Data Buoy Center SOS'
        # assert ident.keywords == ['Weather', 'Ocean Currents', 'Air Temperature', 'Water Temperature', 'Conductivity', 'Salinity', 'Barometric Pressure', 'Water Level', 'Waves', 'Winds', 'NDBC']
        assert ident.fees == 'NONE'
        assert ident.accessconstraints == 'NONE'

    def test_ndbc_describe_sensor(self):
        # DescribeSensor should return a list of parsed SensorML documents.
        self.c.features = ['41012']
        response = self.c.metadata(output_format='text/xml;subtype="sensorML/1.0.1"')
        assert isinstance(response, list)
        assert isinstance(response[0], SensorML)

    def test_raw_ndbc_get_observation(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ['41012']
        self.c.variables = ['air_pressure_at_sea_level']

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)
        # Expected CSV shape (was a no-op string literal in the original):
        # station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"depth (m)","air_pressure_at_sea_level (hPa)"
        # urn:ioos:station:wmo:41012,urn:ioos:sensor:wmo:41012::baro1,30.04,-80.55,2012-10-01T00:50:00Z,0.00,1009.8
        data = list(csv.DictReader(io.StringIO(response)))
        assert data[0]['station_id'] == 'urn:ioos:station:wmo:41012'
        assert data[0]['sensor_id'] == 'urn:ioos:sensor:wmo:41012::baro1'
        assert data[0]['date_time'] == "2012-10-01T00:50:00Z"
        assert data[0]['depth (m)'] == "0.00"
        assert data[0]['air_pressure_at_sea_level (hPa)'] == "1009.8"

    def test_raw_ndbc_get_observation_all_stations(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        # TODO: This should not return all stations in the future.  We should make multiple requests.
        self.c.features = ['32st0', '41012']  # Triggers network-all
        self.c.variables = ['air_pressure_at_sea_level']

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)

        data = list(csv.DictReader(io.StringIO(response)))
        stations = list({row['station_id'] for row in data})
        # 265 stations measured air_pressure that day
        assert len(stations) == 265

        # Expected first data row:
        # station_id,sensor_id,"latitude (degree)","longitude (degree)",date_time,"depth (m)","air_pressure_at_sea_level (hPa)"
        # urn:ioos:station:wmo:32st0,urn:ioos:sensor:wmo:32st0::baro1,-19.713,-85.585,2012-10-01T00:00:00Z,,1019.0
        assert data[0]['station_id'] == 'urn:ioos:station:wmo:32st0'
        assert data[0]['sensor_id'] == 'urn:ioos:sensor:wmo:32st0::baro1'
        assert data[0]['date_time'] == "2012-10-01T00:00:00Z"
        assert data[0]['depth (m)'] == ""
        assert data[0]['air_pressure_at_sea_level (hPa)'] == "1019.0"

    def test_raw_ndbc_get_observation_no_stations(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = []  # Triggers network-all
        self.c.variables = ['air_pressure_at_sea_level']

        response = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(response, string_types)

        data = list(csv.DictReader(io.StringIO(response)))
        stations = list({row['station_id'] for row in data})
        # 265 stations measured air_pressure that day
        assert len(stations) == 265

        # Expected first data row (same as the all-stations test above):
        # urn:ioos:station:wmo:32st0,urn:ioos:sensor:wmo:32st0::baro1,-19.713,-85.585,2012-10-01T00:00:00Z,,1019.0
        assert data[0]['station_id'] == 'urn:ioos:station:wmo:32st0'
        assert data[0]['sensor_id'] == 'urn:ioos:sensor:wmo:32st0::baro1'
        assert data[0]['date_time'] == "2012-10-01T00:00:00Z"
        assert data[0]['depth (m)'] == ""
        assert data[0]['air_pressure_at_sea_level (hPa)'] == "1019.0"
Ejemplo n.º 2
0
class NdbcSosTest(unittest.TestCase):
    """Round-trip tests against the live NDBC SOS endpoint."""

    def setUp(self):
        self.c = NdbcSos()

    def _fetch_csv_rows(self):
        # Issue the raw text/csv request and parse it into a list of row dicts.
        text = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(text, string_types)
        return list(csv.DictReader(io.StringIO(text)))

    def _set_window(self, features):
        # One-day query window shared by the raw-observation tests.
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = features
        self.c.variables = ["air_pressure_at_sea_level"]

    def test_ndbc_server_id(self):
        # Check the service-identification metadata from GetCapabilities.
        ident = self.c.server.identification
        assert ident.title == "National Data Buoy Center SOS"
        assert ident.service == "OGC:SOS"
        assert ident.version == "1.0.0"
        assert ident.abstract == "National Data Buoy Center SOS"
        assert ident.fees == "NONE"
        assert ident.accessconstraints == "NONE"

    def test_ndbc_describe_sensor(self):
        # DescribeSensor yields a list of parsed SensorML documents.
        self.c.features = ["41012"]
        docs = self.c.metadata(output_format='text/xml;subtype="sensorML/1.0.1"')
        assert isinstance(docs, list)
        assert isinstance(docs[0], SensorML)

    def test_raw_ndbc_get_observation(self):
        self._set_window(["41012"])
        rows = self._fetch_csv_rows()
        # First data row:
        # urn:ioos:station:wmo:41012,urn:ioos:sensor:wmo:41012::baro1,30.04,-80.55,2012-10-01T00:50:00Z,0.00,1009.8
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:41012"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:41012::baro1"
        assert first["date_time"] == "2012-10-01T00:50:00Z"
        assert first["depth (m)"] == "0.00"
        assert first["air_pressure_at_sea_level (hPa)"] == "1009.8"

    def test_raw_ndbc_get_observation_all_stations(self):
        # TODO: This should not return all stations in the future.  We should make multiple requests.
        self._set_window(["32st0", "41012"])  # Triggers network-all
        rows = self._fetch_csv_rows()
        # 264 stations measured air_pressure that day
        assert len({r["station_id"] for r in rows}) == 264
        # First data row:
        # urn:ioos:station:wmo:32st0,urn:ioos:sensor:wmo:32st0::baro1,-19.713,-85.585,2012-10-01T00:00:00Z,,1019.0
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:32st0"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:32st0::baro1"
        assert first["date_time"] == "2012-10-01T00:00:00Z"
        assert first["depth (m)"] == "-2.44"
        assert first["air_pressure_at_sea_level (hPa)"] == "1019.0"

    def test_raw_ndbc_get_observation_no_stations(self):
        self._set_window([])  # Triggers network-all
        rows = self._fetch_csv_rows()
        # 264 stations measured air_pressure that day
        assert len({r["station_id"] for r in rows}) == 264
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:32st0"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:32st0::baro1"
        assert first["date_time"] == "2012-10-01T00:00:00Z"
        assert first["depth (m)"] == "-2.44"
        assert first["air_pressure_at_sea_level (hPa)"] == "1019.0"
Ejemplo n.º 3
0
    def get_ndbc_data(self, site, observations, begin_date, units_coverter,
                      db_obj):
        """Fetch 24 hours of NDBC SOS observations ending at *begin_date* for
        *site* and store each record in the xenia database.

        site -- NDBC station id; used to build the platform handle
            'ndbc.<site>.met'.
        observations -- obs-setup records mapping SOS names to xenia
            names/units (dicts with 'sites', 'sos_obs_query', 'xenia_obs').
        begin_date -- datetime; the query window is
            [begin_date - 24h, begin_date], converted to UTC.
        units_coverter -- unused here; kept for caller compatibility
            (the typo is in the existing call sites).
        db_obj -- xenia database wrapper providing platformExists,
            buildMinimalPlatform, mTypeExists, sensorExists and addRec.
        """
        start_time = time.time()
        logger = logging.getLogger(self.__class__.__name__)
        logger.debug("Starting get_ndbc_data")

        row_entry_date = datetime.now()
        utc_tz = timezone('UTC')

        platform_handle = 'ndbc.%s.met' % (site)
        # Create the platform with its observation list on first sight.
        if db_obj.platformExists(platform_handle) is None:
            obs_list = []
            for obs_setup in observations:
                if site in obs_setup['sites']:
                    for xenia_obs in obs_setup['xenia_obs']:
                        obs_list.append({
                            'obs_name': xenia_obs['xenia_name'],
                            'uom_name': xenia_obs['xenia_units'],
                            's_order': 1
                        })
            db_obj.buildMinimalPlatform(platform_handle, obs_list)

        # Resolve and cache the sensor_id/m_type_id for every observation so
        # the per-row inserts below do not have to look them up again.
        for obs_setup in observations:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    xenia_obs['m_type_id'] = db_obj.mTypeExists(
                        xenia_obs['xenia_name'], xenia_obs['xenia_units'])
                    xenia_obs['sensor_id'] = db_obj.sensorExists(
                        xenia_obs['xenia_name'], xenia_obs['xenia_units'],
                        platform_handle, 1)

        sos_query = NdbcSos()
        logger.debug("Query site: %s for date: %s" % (site, begin_date))
        sos_query.clear()
        utc_end_date = begin_date.astimezone(utc_tz)
        start_date = begin_date.astimezone(utc_tz) - timedelta(hours=24)

        # NOTE(review): the original iterated the module-global ndbc_obs here
        # while every other loop in this method used the observations
        # parameter; using the parameter keeps the method self-contained.
        for obs_setup in observations:
            if site not in obs_setup['sites']:
                continue
            # Column indices are discovered from the CSV header row below.
            date_ndx = None
            value_ndx = None
            lat_ndx = None
            lon_ndx = None
            depth_ndx = None

            sos_query.filter(features=[site],
                             start=start_date,
                             end=utc_end_date,
                             variables=[obs_setup['sos_obs_query']])
            try:
                response = sos_query.raw(responseFormat="text/csv")
            except Exception as e:
                logger.exception(e)
                continue

            csv_reader = csv.reader(
                response.decode('utf-8').split('\n'), delimiter=',')
            line_cnt = 0
            for row in csv_reader:
                for xenia_obs_setup in obs_setup['xenia_obs']:
                    obs_type = xenia_obs_setup['xenia_name']
                    uom_type = xenia_obs_setup['xenia_units']
                    s_order = 1

                    if line_cnt > 0 and len(row):
                        obs_date = datetime.strptime(
                            row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                        # Defaults up front so a missing column or a bad value
                        # can never leave one of these names unbound when the
                        # multi_obs record is built (the original raised
                        # UnboundLocalError when e.g. depth_ndx stayed None).
                        obs_val = 0.0
                        depth = 0
                        latitude = 0.0
                        longitude = 0.0
                        try:
                            obs_val = float(row[value_ndx])
                        except ValueError as e:
                            logger.exception(e)
                        try:
                            if depth_ndx is not None:
                                depth = float(row[depth_ndx])
                        except ValueError as e:
                            logger.exception(e)
                            depth = 0
                        try:
                            if lat_ndx is not None:
                                latitude = float(row[lat_ndx])
                            if lon_ndx is not None:
                                longitude = float(row[lon_ndx])
                        except ValueError as e:
                            logger.exception(e)
                            latitude = 0.0
                            longitude = 0.0

                        obs_rec = multi_obs(
                            row_entry_date=row_entry_date,
                            platform_handle=platform_handle,
                            sensor_id=xenia_obs_setup['sensor_id'],
                            m_type_id=xenia_obs_setup['m_type_id'],
                            m_date=obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                            m_lon=longitude,
                            m_lat=latitude,
                            m_z=depth,
                            m_value=obs_val,
                        )

                        rec_id = db_obj.addRec(obs_rec, True)
                        if rec_id is not None:
                            logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %
                                         (obs_type, uom_type, obs_date, obs_val, s_order))
                        else:
                            logger.error("Failed adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %
                                         (obs_type, uom_type, obs_date, obs_val, s_order))
                    elif value_ndx is None:
                        # Header row: locate the columns of interest once.
                        for ndx, val in enumerate(row):
                            if val.lower().find(
                                    xenia_obs_setup['sos_obs_name']
                            ) != -1:
                                value_ndx = ndx
                            if val.lower().find('date_time') != -1:
                                date_ndx = ndx
                            if val.lower().find('latitude') != -1:
                                lat_ndx = ndx
                            if val.lower().find('longitude') != -1:
                                lon_ndx = ndx
                            if val.lower().find('depth') != -1:
                                depth_ndx = ndx
                line_cnt += 1

        logger.debug("Finished get_ndbc_data in %f seconds" %
                     (time.time() - start_time))
def get_ndbc_data(site, dates, units_coverter, db_obj):
    """Query the NDBC SOS service for *site* around each date in *dates* and
    write the observations into the xenia database via *db_obj*.

    site -- NDBC station id; used to build the platform handle
        'ndbc.<site>.met'.
    dates -- list of datetimes; each is queried as a [-24h, +24h] UTC window.
        The list is sorted newest-first in place.
    units_coverter -- appears unused in this function (typo preserved from
        the caller's API).
    db_obj -- xenia database wrapper (platformExists, buildMinimalPlatform,
        addMeasurement, lastErrorMsg).
    """
    start_time = time.time()
    logger = logging.getLogger(__name__)
    logger.debug("Starting get_ndbc_data")

    row_entry_date = datetime.now()
    utc_tz = timezone('UTC')
    # NOTE(review): eastern_tz is never used below.
    eastern_tz = timezone('US/Eastern')

    platform_handle = 'ndbc.%s.met' % (site)
    # Create the platform and its observation list on first sight; the
    # module-global ndbc_obs maps SOS observation names to xenia names/units.
    if db_obj.platformExists(platform_handle) == -1:
        obs_list = []
        for obs_setup in ndbc_obs:
            if site in obs_setup['sites']:
                for xenia_obs in obs_setup['xenia_obs']:
                    obs_list.append({
                        'obs_name': xenia_obs['xenia_name'],
                        'uom_name': xenia_obs['xenia_units'],
                        's_order': 1
                    })
        db_obj.buildMinimalPlatform(platform_handle, obs_list)

    sos_query = NdbcSos()
    # Process the most recent dates first.
    dates.sort(reverse=True)
    for rec_date in dates:
        logger.debug("Query site: %s for date: %s" % (site, rec_date))
        sos_query.clear()
        # 48-hour window centred on rec_date, in UTC.
        utc_end_date = rec_date.astimezone(utc_tz) + timedelta(hours=24)
        start_date = rec_date.astimezone(utc_tz) - timedelta(hours=24)

        for obs_setup in ndbc_obs:
            if site in obs_setup['sites']:
                # Column indices; discovered from the CSV header row below.
                date_ndx = None
                value_ndx = None
                lat_ndx = None
                lon_ndx = None
                depth_ndx = None

                sos_query.filter(features=[site],
                                 start=start_date,
                                 end=utc_end_date,
                                 variables=[obs_setup['sos_obs_query']])
                try:
                    #results = nos_query.collect()
                    response = sos_query.raw(responseFormat="text/csv")
                except Exception as e:
                    logger.exception(e)
                else:
                    # NOTE(review): assumes raw() returns text here; other
                    # examples call .decode('utf-8') on a bytes response —
                    # confirm against the installed pyoos version.
                    csv_reader = csv.reader(response.split('\n'),
                                            delimiter=',')
                    line_cnt = 0

                    for row in csv_reader:
                        for xenia_obs_setup in obs_setup['xenia_obs']:
                            obs_type = xenia_obs_setup['xenia_name']
                            uom_type = xenia_obs_setup['xenia_units']
                            s_order = 1

                            # Data rows follow the header (line_cnt 0).
                            if line_cnt > 0 and len(row):
                                obs_date = datetime.strptime(
                                    row[date_ndx], '%Y-%m-%dT%H:%M:%SZ')
                                try:
                                    obs_val = float(row[value_ndx])
                                except ValueError as e:
                                    # Unparsable value: log and store 0.0.
                                    logger.exception(e)
                                    obs_val = 0.0
                                logger.debug("Adding obs: %s(%s) Date: %s Value: %s S_Order: %d" %\
                                             (obs_type, uom_type, obs_date, obs_val, s_order))
                                depth = 0
                                if depth_ndx is not None:
                                    depth = float(row[depth_ndx])
                                if not db_obj.addMeasurement(
                                        obs_type,
                                        uom_type,
                                        platform_handle,
                                        obs_date.strftime('%Y-%m-%dT%H:%M:%S'),
                                        float(row[lat_ndx]),
                                        float(row[lon_ndx]),
                                        depth, [obs_val],
                                        sOrder=s_order,
                                        autoCommit=True,
                                        rowEntryDate=row_entry_date):
                                    logger.error(db_obj.lastErrorMsg)
                            else:
                                # Header row: locate the columns of interest.
                                if value_ndx is None:
                                    for ndx, val in enumerate(row):
                                        if val.lower().find(
                                                xenia_obs_setup['sos_obs_name']
                                        ) != -1:
                                            value_ndx = ndx
                                        if val.lower().find('date_time') != -1:
                                            date_ndx = ndx
                                        if val.lower().find('latitude') != -1:
                                            lat_ndx = ndx
                                        if val.lower().find('longitude') != -1:
                                            lon_ndx = ndx
                                        if val.lower().find('depth') != -1:
                                            depth_ndx = ndx
                        line_cnt += 1

    logger.debug("Finished get_ndbc_data in %f seconds" %
                 (time.time() - start_time))
Ejemplo n.º 5
0
# Parse the requested window ('%Y-%m-%d %H:%M') and build ISO-8601 strings.
start_time = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')

# Define the SOS collector
collector = NdbcSos()
# print() works on both Python 2 and 3; the original used the
# Python-2-only statement form.
print(collector.server.identification.title)
collector.variables = data_dict["waves"]["sos_name"]
collector.server.identification.title  # notebook-style echo; no effect as a script

# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box, variables=data_dict["waves"]["sos_name"])

# Raw text/csv response -> DataFrame indexed by observation time.
response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Now let's specify start and end times for subsequent queries
collector.start_time = start_time
collector.end_time = end_time

ofrs = collector.server.offerings

# <codecell>

obs_loc_df.head()

# <codecell>
Ejemplo n.º 6
0
class NdbcSosTest(unittest.TestCase):
    """Tests exercising the NDBC SOS collector against the live service."""

    def setUp(self):
        self.c = NdbcSos()

    def test_ndbc_server_id(self):
        # Service-identification metadata from GetCapabilities.
        ident = self.c.server.identification
        assert ident.title == "National Data Buoy Center SOS"
        assert ident.service == "OGC:SOS"
        assert ident.version == "1.0.0"
        assert ident.abstract == "National Data Buoy Center SOS"
        # assert ident.keywords == ['Weather', 'Ocean Currents', 'Air Temperature', 'Water Temperature', 'Conductivity', 'Salinity', 'Barometric Pressure', 'Water Level', 'Waves', 'Winds', 'NDBC']
        assert ident.fees == "NONE"
        assert ident.accessconstraints == "NONE"

    def test_ndbc_describe_sensor(self):
        # DescribeSensor returns a list of parsed SensorML documents.
        self.c.features = ["41012"]
        docs = self.c.metadata(
            output_format='text/xml;subtype="sensorML/1.0.1"')
        assert isinstance(docs, list)
        assert isinstance(docs[0], SensorML)

    def test_raw_ndbc_get_observation(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = ["41012"]
        self.c.variables = ["air_pressure_at_sea_level"]

        text = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(text, string_types)
        # First data row:
        # urn:ioos:station:wmo:41012,urn:ioos:sensor:wmo:41012::baro1,30.04,-80.55,2012-10-01T00:50:00Z,0.00,1009.8
        rows = list(csv.DictReader(io.StringIO(text)))
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:41012"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:41012::baro1"
        assert first["date_time"] == "2012-10-01T00:50:00Z"
        assert first["depth (m)"] == "0.00"
        assert first["air_pressure_at_sea_level (hPa)"] == "1009.8"

    def test_raw_ndbc_get_observation_all_stations(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        # TODO: This should not return all stations in the future.  We should make multiple requests.
        self.c.features = ["32st0", "41012"]  # Triggers network-all
        self.c.variables = ["air_pressure_at_sea_level"]

        text = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(text, string_types)

        rows = list(csv.DictReader(io.StringIO(text)))
        # 265 stations measured air_pressure that day
        assert len({r["station_id"] for r in rows}) == 265
        # First data row:
        # urn:ioos:station:wmo:32st0,urn:ioos:sensor:wmo:32st0::baro1,-19.713,-85.585,2012-10-01T00:00:00Z,,1019.0
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:32st0"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:32st0::baro1"
        assert first["date_time"] == "2012-10-01T00:00:00Z"
        assert first["depth (m)"] == ""
        assert first["air_pressure_at_sea_level (hPa)"] == "1019.0"

    def test_raw_ndbc_get_observation_no_stations(self):
        self.c.start_time = datetime.strptime("2012-10-01", "%Y-%m-%d")
        self.c.end_time = datetime.strptime("2012-10-02", "%Y-%m-%d")
        self.c.features = []  # Triggers network-all
        self.c.variables = ["air_pressure_at_sea_level"]

        text = self.c.raw(responseFormat="text/csv").decode()
        assert isinstance(text, string_types)

        rows = list(csv.DictReader(io.StringIO(text)))
        # 265 stations measured air_pressure that day
        assert len({r["station_id"] for r in rows}) == 265
        first = rows[0]
        assert first["station_id"] == "urn:ioos:station:wmo:32st0"
        assert first["sensor_id"] == "urn:ioos:sensor:wmo:32st0::baro1"
        assert first["date_time"] == "2012-10-01T00:00:00Z"
        assert first["depth (m)"] == ""
        assert first["air_pressure_at_sea_level (hPa)"] == "1019.0"
Ejemplo n.º 7
0
obs_loc_df.head()

# <markdowncell>

# #### Get NDBC Station Data

# <codecell>

# Collector for the NDBC SOS service, querying the wind variables
# configured in data_dict.
ndbc_collector = NdbcSos()
ndbc_collector.variables = data_dict["winds"]["sos_name"]
ndbc_collector.server.identification.title
# Don't specify start and end date in the filter and the most recent observation will be returned
ndbc_collector.filter(bbox=bounding_box,
                 variables=data_dict["winds"]["sos_name"])

# Raw text/csv response -> DataFrame indexed by observation time.
# NOTE(review): response.encode() assumes raw() returned text here; other
# examples in this file call .decode() on a bytes response — confirm against
# the installed pyoos version.
response = ndbc_collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
                         parse_dates=True,
                         index_col='date_time')

# Save the station info in a larger global dict
st_list = gather_station_info(obs_loc_df, st_list, "ndbc")

# Now let's specify start and end times
ndbc_collector.start_time = start_time
ndbc_collector.end_time = end_time

ofrs = ndbc_collector.server.offerings

# Print the first 5 rows of the DataFrame
obs_loc_df.head()
Ejemplo n.º 8
0
def get_ndbc(start, end, bbox, sos_name='waves', datum='MSL', verbose=True):
    """Read NDBC SOS data for every station inside *bbox*.

    start, end -- datetimes bounding the observation window.
    bbox -- bounding box passed to the collector.
    sos_name -- 'waves' or 'winds'; selects the columns kept from the
        full SOS response (see the per-branch lists below).
    datum -- unused; kept for call compatibility.
    verbose -- when True, print the collector title and offering count.

    Returns (data, table): a list of per-station DataFrames (each carrying
    station metadata on ``_metadata``) and a summary DataFrame indexed by
    station name.
    """
    # Columns retained from the full SOS response for each variable group.
    if sos_name == 'waves':
        col = ['sea_surface_wave_significant_height (m)', 'sea_surface_wave_peak_period (s)',
               'sea_surface_wave_mean_period (s)', 'sea_water_temperature (c)',
               'sea_surface_wave_to_direction (degree)']
    elif sos_name == 'winds':
        col = ['wind_from_direction (degree)', 'wind_speed (m/s)',
               'wind_speed_of_gust (m/s)', 'upward_air_velocity (m/s)']
    else:
        # The original fell through with `col` unbound (NameError later);
        # fail fast with a clear message instead.
        raise ValueError("sos_name must be 'waves' or 'winds', got %r" % (sos_name,))

    collector = NdbcSos()
    collector.set_bbox(bbox)
    collector.start_time = start

    collector.variables = [sos_name]
    ofrs = collector.server.offerings
    title = collector.server.identification.title

    # Probe with a one-day window first to discover which stations report.
    collector.features = None
    collector.end_time = start + datetime.timedelta(1)
    response = collector.raw(responseFormat='text/csv')

    probe = pd.read_csv(BytesIO(response), parse_dates=True)
    grouped = probe.groupby('station_id')
    # One representative (first) row per station.
    df = pd.DataFrame.from_dict(
        {station: grouped.get_group(station).iloc[0]
         for station in grouped.groups}).T

    # Map offering names to their human-readable descriptions.
    station_dict = {offering.name: offering.description
                    for offering in collector.server.offerings}
    df['name'] = [station_dict.get(sta, sta) for sta in df.index]

    # Override the short probe window with the real end time.
    collector.end_time = end

    data = []
    for _, row in df.iterrows():
        station_id = row['station_id'].split(':')[-1]
        collector.features = [station_id]
        response = collector.raw(responseFormat='text/csv')
        obs = pd.read_csv(BytesIO(response), parse_dates=True,
                          index_col='date_time').reset_index()
        obs = obs.drop_duplicates(subset='date_time').set_index('date_time')
        series = obs[col]
        # Stash station metadata on the (private) pandas _metadata attribute
        # so it travels with the frame.
        series._metadata = dict(
            station=row.get('station_id'),
            station_name=row.get('name'),
            station_code=str(row.get('station_id').split(':')[-1]),
            sensor=row.get('sensor_id'),
            lon=row.get('longitude (degree)'),
            lat=row.get('latitude (degree)'),
            depth=row.get('depth (m)'),
        )
        data.append(series)

    # Summary table, one row per station.
    table = pd.DataFrame(
        dict(
            station_name=[s._metadata.get('station_name') for s in data],
            station_code=[s._metadata.get('station_code') for s in data],
            sensor=[s._metadata.get('sensor') for s in data],
            lon=[s._metadata.get('lon') for s in data],
            lat=[s._metadata.get('lat') for s in data],
            depth=[s._metadata.get('depth', 'NA') for s in data],
        )
    ).set_index('station_name')

    if verbose:
        print('Collector offerings')
        print('{}: {} offerings'.format(title, len(ofrs)))

    return data, table