def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """Query the CO-OPS SOS service and return matching observations.

    Parameters mirror the collector configuration: a ``start``/``end``
    time window, the SOS variable ``sos_name``, its ``units`` (a
    ``cf_units`` unit object), and a ``bbox`` spatial filter.

    Returns a ``(data, table)`` pair: ``data`` is the list of series
    produced by ``collector2table`` and ``table`` is a pandas DataFrame
    of per-station metadata indexed by station name.
    """
    sos = CoopsSos()
    sos.set_bbox(bbox)
    sos.end_time = end
    sos.start_time = start
    sos.variables = [sos_name]

    # Fetched up front; used only by the optional verbose report below.
    offerings = sos.server.offerings
    server_title = sos.server.identification.title

    column = "{} ({})".format(sos_name, units.format(cf_units.UT_ISO_8859_1))
    data = collector2table(
        collector=sos,
        config=dict(units=units, sos_name=sos_name),
        col=column,
    )

    # Flatten the per-series metadata into one tidy frame; ``depth``
    # falls back to the string "NA" when a station does not report one.
    plain_keys = ("station_name", "station_code", "sensor", "lon", "lat")
    meta = {key: [s._metadata.get(key) for s in data] for key in plain_keys}
    meta["depth"] = [s._metadata.get("depth", "NA") for s in data]
    table = pd.DataFrame(meta).set_index("station_name")

    if verbose:
        print("Collector offerings")
        print("{}: {} offerings".format(server_title, len(offerings)))

    return data, table
def get_coops(start, end, sos_name, units, bbox, verbose=False):
    """Read CO-OPS observations for ``sos_name`` inside ``bbox``.

    Callers are expected to retry on failure because the server cannot
    handle the high traffic during hurricane season.
    """
    print(' >> Get CO-OPS information')

    sos = CoopsSos()
    sos.set_bbox(bbox)
    sos.end_time = end
    sos.start_time = start
    sos.variables = [sos_name]

    # Fetched up front; used only by the optional verbose report below.
    offerings = sos.server.offerings
    server_title = sos.server.identification.title

    # ``datum`` pins the vertical reference of the returned values to
    # Mean Sea Level.
    cfg = dict(units=units, sos_name=sos_name, datum='MSL')
    data = collector2table(
        collector=sos,
        config=cfg,
        col='{} ({})'.format(sos_name, units.format(cf_units.UT_ISO_8859_1)),
    )

    # Flatten the per-series metadata into one station table; ``depth``
    # falls back to the string 'NA' when a station does not report one.
    keys = ('station_name', 'station_code', 'sensor', 'lon', 'lat')
    meta = {k: [s._metadata.get(k) for s in data] for k in keys}
    meta['depth'] = [s._metadata.get('depth', 'NA') for s in data]
    table = pd.DataFrame(meta).set_index('station_name')

    if verbose:
        print('Collector offerings')
        print('{}: {} offerings'.format(server_title, len(offerings)))

    return data, table
# NOTE(review): this chunk opens inside an unseen station-collection
# function (the enclosing ``def`` and loop are outside this view), so
# the leading indentation below is reconstructed -- confirm against the
# full file.  Python 2 ``print`` statements throughout.
    st_list[station_name]["lon"] = lon_data[i]
    print station_name
    print "number of stations in bbox", len(st_list.keys())
    return st_list

# <markdowncell>
# #COOPS Station Locations
# <codecell>
coops_collector = CoopsSos()
coops_collector.start_time = start_time
coops_collector.end_time = end_time
coops_collector.variables = data_dict["currents"]["sos_name"]
# Bare attribute access -- presumably forces the collector to fetch the
# server capabilities document; the value itself is discarded.  Confirm.
coops_collector.server.identification.title
print coops_collector.start_time, ":", coops_collector.end_time
ofrs = coops_collector.server.offerings
print(len(ofrs))

# <codecell>
print "Date: ", iso_start, " to ", iso_end
# Comma-joined "minx,miny,maxx,maxy"-style string for the GetObservation
# request below (exact ordering depends on how ``bounding_box`` is built
# upstream -- not visible here).
box_str = ','.join(str(e) for e in bounding_box)
print "Lat/Lon Box: ", box_str
# NOTE(review): this URL template is truncated at the chunk boundary;
# the remaining query parameters continue past this view.
url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
        'service=SOS&request=GetObservation&version=1.0.0&'
        'observedProperty=%s&bin=1&'
# NOTE(review): near-duplicate of the previous chunk; it also opens
# inside an unseen station-collection function, so the leading
# indentation is reconstructed -- confirm against the full file.
# Python 2 ``print`` statements throughout.
    st_list[station_name]["source"] = source
    st_list[station_name]["lon"] = lon_data[i]
    print station_name
    print "number of stations in bbox",len(st_list.keys())
    return st_list

# <markdowncell>
# #COOPS Station Locations
# <codecell>
coops_collector = CoopsSos()
coops_collector.start_time = start_time
coops_collector.end_time = end_time
coops_collector.variables = data_dict["currents"]["sos_name"]
# Bare attribute access -- presumably triggers the capabilities fetch;
# the returned title is discarded.  Confirm.
coops_collector.server.identification.title
print coops_collector.start_time,":", coops_collector.end_time
ofrs = coops_collector.server.offerings
print(len(ofrs))

# <codecell>
print "Date: ",iso_start," to ", iso_end
box_str=','.join(str(e) for e in bounding_box)
print "Lat/Lon Box: ",box_str
# NOTE(review): the URL template is truncated at the chunk boundary;
# the remaining query parameters continue past this view.
url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
        'service=SOS&request=GetObservation&version=1.0.0&'
        'observedProperty=%s&bin=1&'
# <markdowncell> # ### SOS Requirements # <codecell> start_time = dt.datetime.strptime(start_date, "%Y-%m-%d %H:%M") end_time = dt.datetime.strptime(stop_date, "%Y-%m-%d %H:%M") iso_start = start_time.strftime("%Y-%m-%dT%H:%M:%SZ") iso_end = end_time.strftime("%Y-%m-%dT%H:%M:%SZ") # <codecell> collector = CoopsSos() collector.start_time = start_time collector.end_time = end_time collector.variables = data_dict["currents"]["sos_name"] collector.server.identification.title print collector.start_time, ":", collector.end_time ofrs = collector.server.offerings # <markdowncell> # ###Find all SOS stations within the bounding box and time extent # <codecell> print "Date: ", iso_start, " to ", iso_end box_str = ",".join(str(e) for e in bounding_box) print "Lat/Lon Box: ", box_str
# Report every SOS endpoint discovered by the earlier catalog search.
log.info(fmt(' SOS URLs '))
for url in sos_urls:
    log.info('{}'.format(url))

# In[ ]:

from pyoos.collectors.coops.coops_sos import CoopsSos

collector = CoopsSos()

sos_name = 'water_surface_height_above_reference_datum'
# NAVD vertical datum for the water-level observations.
datum = 'NAVD'

collector.set_datum(datum)
collector.end_time = stop
collector.start_time = start
collector.variables = [sos_name]

ofrs = collector.server.offerings
title = collector.server.identification.title

log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))

# In[ ]:

from pandas import read_csv
from utilities import sos_request

# NOTE(review): this dict literal is truncated at the chunk boundary;
# the remaining request parameters continue past this view.
params = dict(observedProperty=sos_name,
# NOTE(review): this chunk opens with the tail of an unseen call
# (only the closing ``service='sos:url')`` argument is visible), so the
# leading indentation is reconstructed -- confirm against the full file.
                   service='sos:url')
print("\n".join(sos_urls))

# <markdowncell>
# ## 1. Get observations from SOS
# Here we are using a custom class from pyoos to read the CO-OPS SOS.
# This is definitely unsavory, as the whole point of using a standard is
# avoid the need for custom classes for each service.  Need to examine
# the consequences of removing this and just going with straight SOS
# service using OWSLib.
# <codecell>
collector = CoopsSos()
# Original inline comment says "MSL" although 'NAVD' is requested --
# presumably a leftover from an earlier datum choice; verify intent.
collector.set_datum('NAVD')  # MSL
# Bare attribute access -- presumably forces the capabilities fetch;
# the value is discarded.  Confirm.
collector.server.identification.title
collector.start_time = jd_start
collector.end_time = jd_stop
collector.variables = [sos_name]

# <codecell>
ofrs = collector.server.offerings
print(len(ofrs))
# Spot-check a small slice of the offerings list.
for p in ofrs[700:710]:
    print(p)

# <markdowncell>
# ### Find the SOS stations within our bounding box and time extent
# We would like to just use a filter on a collection to get a new
# collection, but PYOOS doesn't do that yet.  So we do a GetObservation
# request for a collection, including a bounding box, and asking for one
# value at the start of the time period of interest.  We use that to do
# a bounding box filter on the SOS server, which returns 1 point for
# each station found.  So for 3 stations, we get back 3 records, in CSV
# format.  We can strip the station ids from the CSV, and then we have a
# list of stations we can use with pyoos.  The template for the
# GetObservation query for the bounding box filtered collection was
# generated using the GUI at
# http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/
# <codecell>
# Attach human-readable names to the NDBC table and render a preview.
ndbc['name'] = names
ndbc.set_index('name', inplace=True)
to_html(ndbc.head())

# ### CoopsSoS

# In[9]:

from pyoos.collectors.coops.coops_sos import CoopsSos

# Configure the CO-OPS collector with the same bbox/time window used
# for the NDBC search above.
collector_coops = CoopsSos()
collector_coops.set_bbox(bbox)
collector_coops.end_time = stop
collector_coops.start_time = start
collector_coops.variables = [sos_name]

ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title

log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))

# In[10]:

from utilities import get_coops_metadata

# Convert the collector response into per-station tables.
coops = collector2table(collector=collector_coops)
# <markdowncell> # ## 1. Get observations from SOS # <codecell> collector = CoopsSos() # <codecell> collector.server.identification.title # <codecell> collector.start_time = jd_start collector.end_time = jd_stop collector.variables = [sos_name] # <codecell> ofrs = collector.server.offerings # <codecell> print len(ofrs) for p in ofrs[700:710]: print p # <markdowncell> # ### Find the SOS stations within our bounding box and time extent # We would like to just use a filter on a collection to get a new collection, but PYOOS doesn't do that yet. So we do a GetObservation request for a collection, including a bounding box, and asking for one value at the start of the time period of interest. We use that to do a bounding box filter on the SOS server, which returns 1 point for each station found. So for 3 stations, we get back 3 records, in CSV format. We can strip the station ids from the CSV, and then we have a list of stations we can use with pyoos. The template for the GetObservation query for the bounding box filtered collection was generated using the GUI at http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/
# <codecell> start_time = datetime.strptime(start_date, '%Y-%m-%d %H:%M') end_time = datetime.strptime(end_date, '%Y-%m-%d %H:%M') # <codecell> iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ') iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ') collector = CoopsSos() collector.set_datum('NAVD') collector.server.identification.title collector.start_time = start_time collector.end_time = end_time collector.variables = [data_dict["water"]["sos_name"]] # <codecell> print("Date: %s to %s" % (iso_start, iso_end)) box_str = ','.join(str(e) for e in box) print("Lat/Lon Box: %s" % box_str) # <codecell> # Grab the opendap sos url and use it for the service. for sos_url in sos_urls: if 'opendap' in sos_url: break
# NOTE(review): jupytext-style chunk -- the bare paragraphs below are
# markdown cells, not Python, and are left verbatim.
dap_urls = non_stations

print(fmt(" Filtered DAP "))
for url in dap_urls:
    print("{}.html".format(url))

Now we have a nice list of all the models available in the catalog for the domain we specified. We still need to find the observations for the same domain. To accomplish that we will use the `pyoos` library and search the [SOS CO-OPS](https://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/) services using the virtually the same configuration options from the catalog search.

from pyoos.collectors.coops.coops_sos import CoopsSos

# Configure the CO-OPS collector from the shared ``config`` mapping so
# the observation search matches the catalog search above.
collector_coops = CoopsSos()

collector_coops.set_bbox(config["region"]["bbox"])
collector_coops.end_time = config["date"]["stop"]
collector_coops.start_time = config["date"]["start"]
collector_coops.variables = [config["sos_name"]]

ofrs = collector_coops.server.offerings
title = collector_coops.server.identification.title
print(fmt(" Collector offerings "))
print("{}: {} offerings".format(title, len(ofrs)))

To make it easier to work with the data we extract the time-series as pandas tables and interpolate them to a common 1-hour interval index.

import pandas as pd
from ioos_tools.ioos import collector2table

# NOTE(review): this call is truncated at the chunk boundary; the
# remaining keyword arguments continue past this view.
data = collector2table(
    collector=collector_coops,
# NOTE(review): this chunk opens inside an unseen loop over DAP URLs
# (only its body line is visible), so the leading indentation is
# reconstructed -- confirm against the full file.
    log.info('{}.html'.format(url))

log.info(fmt(' SOS '))
for url in sos_urls:
    log.info('{}'.format(url))

# In[9]:

from pyoos.collectors.coops.coops_sos import CoopsSos

collector = CoopsSos()

sos_name = 'water_surface_height_above_reference_datum'
# NAVD vertical datum for the water-level observations.
datum = 'NAVD'

collector.set_datum(datum)
collector.end_time = stop
collector.start_time = start
collector.variables = [sos_name]

ofrs = collector.server.offerings
title = collector.server.identification.title

log.info(fmt(' Collector offerings '))
log.info('{}: {} offerings'.format(title, len(ofrs)))

# In[10]:

import requests
# NOTE(review): ``urlparse`` as a top-level module is Python 2 only
# (``urllib.parse`` in Python 3) -- consistent with the rest of this
# notebook's vintage.
from urlparse import urlparse  # Web-parsing.