Example 1
    def harvest(self):
        self.sos = SensorObservationService(self.service.get('url'))

        # List storing the stations that have already been processed in this SOS server.
        # This is kept and checked later to avoid servers that have the same stations in many offerings.
        processed = []

        for offering in self.sos.offerings:
            # TODO: We assume an offering should only have one procedure here,
            # which will be the case in SOS 2.0, but may not be the case right now
            # on some non-IOOS supported servers.
            uid = offering.procedures[0]
            sp_uid = uid.split(":")

            # template:  urn:ioos:type:authority:id
            # sample:    urn:ioos:station:wmo:21414
            if sp_uid[2] == "station":  # Station Offering
                if uid not in processed:
                    self.process_station(uid)
                processed.append(uid)
            elif sp_uid[2] == "network":  # Network Offering
                network_ds = IoosDescribeSensor(
                    self.sos.describe_sensor(
                        outputFormat=
                        'text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"',
                        procedure=uid))
                # Iterate over stations in the network and process them individually
                for proc in network_ds.procedures:
                    if proc is not None and proc.split(":")[2] == "station":
                        if proc not in processed:
                            self.process_station(proc)
                        processed.append(proc)
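
The branching above turns entirely on the third field of the procedure URN (urn:ioos:type:authority:id). A minimal standalone sketch of that dispatch, with placeholder handler callables instead of the harvester's own methods:

# Sketch: dispatch an IOOS procedure URN on its "type" field.
# The urn:ioos:type:authority:id layout is taken from the comment above;
# process_station/process_network are placeholder callables, not the harvester's methods.
def dispatch_procedure(uid, process_station, process_network):
    parts = uid.split(":")
    if len(parts) < 3:
        raise ValueError("Unexpected URN layout: %s" % uid)
    if parts[2] == "station":
        process_station(uid)
    elif parts[2] == "network":
        process_network(uid)
    else:
        raise ValueError("Unknown procedure type %r in %s" % (parts[2], uid))

# e.g. dispatch_procedure("urn:ioos:station:wmo:21414", harvester.process_station, harvester.process_network)
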
Example 2
    def test_sensor(self):
        data = open(resource_file(os.path.join('ioos_swe','SML-DescribeSensor-Sensor.xml')), "rU").read()
        d = IoosDescribeSensor(data)

        assert d.ioos_version == "1.0"
        assert d.system.name == "urn:ioos:sensor:us.glos:45023:sea_water_temperature"
        assert d.starting     == datetime(2013, 8, 26, 18, 10, tzinfo=pytz.utc)
        assert d.ending       == datetime(2013, 8, 26, 18, 10, tzinfo=pytz.utc)
Example 3
    def test_network(self):
        data = open(resource_file(os.path.join('ioos_swe','SML-DescribeSensor-Network.xml')), "rU").read()
        d    = IoosDescribeSensor(data)

        assert d.ioos_version == "1.0"
        assert d.system.name  == "urn:ioos:network:nanoos:all"
        assert d.procedures   == sorted([u'urn:ioos:station:wmo:41001', u'urn:ioos:station:wmo:41002'])
        assert d.starting     == datetime(2008, 4, 28, 8, tzinfo=pytz.utc)
        assert d.ending       == datetime(2012, 12, 27, 19, tzinfo=pytz.utc)
Example 4
    def test_station(self):
        data = open(resource_file(os.path.join('ioos_swe','SML-DescribeSensor-Station.xml')), "rU").read()
        d = IoosDescribeSensor(data)

        assert d.ioos_version == "1.0"
        assert d.system.name == "urn:ioos:station:wmo:41001"
        assert d.variables   == sorted([u'http://mmisw.org/ont/cf/parameter/sea_water_temperature',
                                        u'http://mmisw.org/ont/cf/parameter/sea_water_salinity',
                                        u'http://mmisw.org/ont/cf/parameter/air_pressure',
                                        u'http://mmisw.org/ont/cf/parameter/air_temperature'])
        assert d.starting     == datetime(2008, 4, 28, 8, tzinfo=pytz.utc)
        assert d.ending       == datetime(2012, 12, 27, 19, tzinfo=pytz.utc)
Example 5
    def harvest(self):
        self.sos = SensorObservationService(self.service.get('url'))

        scores = self.ccheck_service()
        metamap = self.metamap_service()
        try:
            self.save_ccheck_service('ioos', scores, metamap)
        finally:
            #except Exception as e:
            #app.logger.warn("could not save compliancecheck/metamap information: %s", e)
            pass

        # List storing the stations that have already been processed in this SOS server.
        # This is kept and checked later to avoid servers that have the same stations in many offerings.
        processed = []

        # handle network:all by increasing max timeout
        net_len = len(self.sos.offerings)
        net_timeout = 120 if net_len <= 36 else 5 * net_len

        # allow searching child offerings by name for network offerings
        name_lookup = {o.name: o for o in self.sos.offerings}
        for offering in self.sos.offerings:
            # TODO: We assume an offering should only have one procedure here,
            # which will be the case in SOS 2.0, but may not be the case right now
            # on some non-IOOS supported servers.
            uid = offering.procedures[0]
            sp_uid = uid.split(":")

            # template:   urn:ioos:type:authority:id
            # sample:     urn:ioos:station:wmo:21414
            if len(sp_uid) > 2 and sp_uid[2] == "network":  # Network Offering
                if uid[-3:].lower() == 'all':
                    continue  # Skip the 'all' network offering
                net = self._describe_sensor(uid, timeout=net_timeout)

                network_ds = IoosDescribeSensor(net)
                # Iterate over stations in the network and process them individually
                for proc in network_ds.procedures:
                    if proc is not None and proc.split(":")[2] == "station":
                        if proc not in processed:
                            # offering associated with this procedure
                            proc_off = name_lookup.get(proc)
                            self.process_station(proc, proc_off)
                        processed.append(proc)
            else:
                # Station Offering, or malformed urn - try it anyway as if it is a station
                if uid not in processed:
                    self.process_station(uid, offering)
                processed.append(uid)
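
Since processed is only ever used for membership tests, a set is the more natural container. A small sketch of that variant, offered as a suggestion rather than the project's code; offerings stands in for self.sos.offerings:

# Sketch: yield each station procedure once, even when it appears in several offerings.
# `offerings` is assumed to be an iterable of owslib-style offering objects
# exposing .procedures, as used in the harvest() method above.
def unique_station_procedures(offerings):
    seen = set()
    for offering in offerings:
        uid = offering.procedures[0]
        if uid not in seen:
            seen.add(uid)
            yield uid, offering
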
Example 6
    def process_station(self, uid, offering):
        """ Makes a DescribeSensor request based on a 'uid' parameter being a
            station procedure.  Also pass along an offering with
            getCapabilities information for items such as temporal extent"""

        GML_NS = "http://www.opengis.net/gml"
        XLINK_NS = "http://www.w3.org/1999/xlink"

        with app.app_context():

            app.logger.info("process_station: %s", uid)
            desc_sens = self._describe_sensor(uid, timeout=1200)
            # FIXME: add some kind of notice saying the station failed
            if desc_sens is None:
                app.logger.warn(
                    "Could not get a valid describeSensor response")
                return
            metadata_value = etree.fromstring(desc_sens)
            sensor_ml = SensorML(metadata_value)
            try:
                station_ds = IoosDescribeSensor(metadata_value)
            # if this doesn't conform to the IOOS SensorML profile, fall back to
            # manually picking apart the SensorML
            except ows.ExceptionReport:
                station_ds = process_sensorml(sensor_ml.members[0])

            unique_id = station_ds.id
            if unique_id is None:
                app.logger.warn(
                    "Could not get a 'stationID' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/stationID'"
                )
                return

            dataset = db.Dataset.find_one({'uid': unicode(unique_id)})
            if dataset is None:
                dataset = db.Dataset()
                dataset.uid = unicode(unique_id)
                dataset['active'] = True

            # Find service reference in Dataset.services and remove (to replace it)
            tmp = dataset.services[:]
            for d in tmp:
                if d['service_id'] == self.service.get('_id'):
                    dataset.services.remove(d)

            # Parsing messages
            messages = []

            # NAME
            name = unicode_or_none(station_ds.shortName)
            if name is None:
                messages.append(
                    u"Could not get a 'shortName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/shortName'"
                )

            # DESCRIPTION
            description = unicode_or_none(station_ds.longName)
            if description is None:
                messages.append(
                    u"Could not get a 'longName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/longName'"
                )

            # PLATFORM TYPE
            asset_type = unicode_or_none(
                getattr(station_ds, 'platformType', None))
            if asset_type is None:
                messages.append(
                    u"Could not get a 'platformType' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/platformType'"
                )

            # LOCATION is in GML
            gj = None
            loc = station_ds.location
            if loc is not None and loc.tag == "{%s}Point" % GML_NS:
                pos_element = loc.find("{%s}pos" % GML_NS)
                # some older responses may use the deprecated gml:coordinates
                # element instead of gml:pos
                if pos_element is None:
                    pos_element = loc.find("{%s}coordinates" % GML_NS)
                # strip out points
                positions = map(float, pos_element.text.split(" "))

                for el in [pos_element, loc]:
                    srs_name = testXMLAttribute(el, "srsName")
                    if srs_name:
                        crs = Crs(srs_name)
                        if crs.axisorder == "yx":
                            gj = json.loads(
                                geojson.dumps(
                                    geojson.Point([positions[1],
                                                   positions[0]])))
                        else:
                            gj = json.loads(
                                geojson.dumps(
                                    geojson.Point([positions[0],
                                                   positions[1]])))
                        break
                else:
                    if positions:
                        messages.append(
                            u"Position(s) found but could not parse SRS: %s, %s"
                            % (positions, srs_name))

            else:
                messages.append(
                    u"Found an unrecognized child of the sml:location element and did not attempt to process it: %s"
                    % loc)

            meta_str = unicode(etree.tostring(metadata_value)).strip()
            if len(meta_str) > 4000000:
                messages.append(
                    u'Metadata document was too large to store (len: %s)' %
                    len(meta_str))
                meta_str = u''

            service = {
                # Reset service
                'name': name,
                'description': description,
                'service_type': self.service.get('service_type'),
                'service_id': ObjectId(self.service.get('_id')),
                'data_provider': self.service.get('data_provider'),
                'metadata_type': u'sensorml',
                'metadata_value': u'',
                'time_min': getattr(offering, 'begin_position', None),
                'time_max': getattr(offering, 'end_position', None),
                'messages': map(unicode, messages),
                'keywords': map(unicode, sorted(station_ds.keywords)),
                'variables': map(unicode, sorted(station_ds.variables)),
                'asset_type': get_common_name(asset_type),
                'geojson': gj,
                'updated': datetime.utcnow()
            }

            dataset.services.append(service)
            dataset.updated = datetime.utcnow()
            dataset.save()

            # do compliance checker / metadata now
            scores = self.ccheck_station(sensor_ml)
            metamap = self.metamap_station(sensor_ml)

            try:
                self.save_ccheck_station('ioos', dataset._id, scores, metamap)
            except Exception as e:
                app.logger.warn(
                    "could not save compliancecheck/metamap information: %s",
                    e)

            return "Harvest Successful"
Example 7
    def get_stations_df(self, sos_url, station_urns_sel=None):
        """ Returns a Pandas Dataframe
        """
        # oFrmts: IOOS SOS OutputFormat strings (first is compliant to the IOOS SOS spec,
        # second is to accommodate NDBC).  More info here:
        # http://ioos.github.io/sos-guidelines/doc/wsdd/sos_wsdd_github_notoc/#describesensor-request:638e0b263020c13a76a55332bd966dbe
        oFrmts = [
            'text/xml; subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"',
            'text/xml;subtype="sensorML/1.0.1"'
        ]
        params = {
            'service': 'SOS',
            'request': 'GetCapabilities',
            'acceptVersions': '1.0.0'
        }
        sos_url_params = sos_url + '?' + urlencode(params)
        # sos_url_params_quoted = quote(sos_url_params,"/=:")
        # sos_url_params_unquoted = unquote(sos_url_params)

        try:
            sosgc = SensorObservationService(sos_url_params)
        except (ConnectionError, ReadTimeout) as e:
            self.log.write(
                u"\nError: unable to connect to SOS service: {url} due to HTTP connection error."
                .format(url=sos_url_params))
            self.log.write(
                u"\nHTTP connection error: {err}.".format(err=str(e)))
            sys.exit(
                "\nError: unable to connect to SOS service: {url}. \nUnderlying HTTP connection error: {err}"
                .format(url=sos_url_params, err=str(e)))

        # vars to store returns from sos_collector.metadata_plus_exceptions function:
        sml_recs = {}
        sml_errors = {}
        describe_sensor_url = {}

        # leverage Pyoos Collector to query for all available stations and obtain SensorML
        # (if station subset not passed in --stations param)
        if station_urns_sel is not None:
            station_urns = station_urns_sel
        else:
            sos_collector = IoosSweSos(sos_url)
            station_urns = [
                urn.name for urn in sos_collector.server.offerings
                if 'network' not in urn.name.split(':')
            ]
            sos_collector.features = station_urns

            # write out stations in SOS that will be handled:
            if self.verbose:
                self.log.write(u"\nStations to process for SOS: {sos}".format(
                    sos=sos_url_params))
                print("Stations to process for SOS: {sos}".format(
                    sos=sos_url_params))
                for feature in sos_collector.features:
                    self.log.write(u"\n - {feature}".format(feature=feature))
                    print(" - {feature}".format(feature=feature))

            # iterate over possible oFrmts expected of the various SOS services (IOOS SOS 1.0, NDBC):
            # for fmt in reversed(oFrmts):
            for fmt in oFrmts:
                try:
                    sml_recs, sml_errors = sos_collector.metadata_plus_exceptions(
                        output_format=fmt, timeout=200)
                    # if no valid SensorML docs returned, try next oFrmt:
                    if not sml_recs:
                        continue
                    else:
                        # assign correct DescribeSensor url (use sos_collector.features rather than sml_recs.keys() to
                        # create DescribeSensor URLs for failures to record in logs):
                        for station in sos_collector.features:
                            describe_sensor_url[
                                station] = self.generate_describe_sensor_url(
                                    sosgc, procedure=station, oFrmt=fmt)

                        # report on errors returned from metadata_plus_exceptions:
                        if sml_errors:
                            if self.verbose:
                                for station, msg in iteritems(sml_errors):
                                    self.log.write(
                                        u"\nSOS DescribeSensor error returned for: {station}, skipping. Error msg: {msg}"
                                        .format(station=station, msg=msg))
                                    print(
                                        "SOS DescribeSensor error returned for: {station}, skipping. Error msg: {msg}"
                                        .format(station=station, msg=msg))
                        else:
                            self.log.write(
                                u"\nSuccess, no errors returned from DescribeSensor requests in service: {sos}"
                                .format(sos=sos_url_params))
                            print(
                                "Success, no errors returned from DescribeSensor requests in service: {sos}"
                                .format(sos=sos_url_params))
                    break
                # ServiceException shouldn't be thrown by metadata_plus_exceptions function, but handle regardless by attempting next oFrmt:
                except ServiceException as e:
                    continue

        station_recs = []
        failures = []
        # generate Pandas DataFrame by populating 'station_recs' list by parsing SensorML strings:
        for station_idx, station_urn in enumerate(station_urns):
            if station_urns_sel is not None:
                # iterate oFrmts items for describe_sensor request (first is IOOS SOS spec-compliant, second is for NDBC SOS):
                for fmt in oFrmts:
                    try:
                        describe_sensor_url[
                            station_urn] = self.generate_describe_sensor_url(
                                sosgc, procedure=station_urn, oFrmt=fmt)
                        sml_str = sosgc.describe_sensor(procedure=station_urn,
                                                        outputFormat=fmt,
                                                        timeout=200)
                        break
                    except ServiceException as e:
                        sml_errors[station_urn] = str(e)
                        continue
                sml = SensorML(sml_str)

            else:
                # process valid SensorML responses, quietly pass on invalid stations (add to failures list for verbose reporting):
                try:
                    sml = sml_recs[station_urn]
                except KeyError:
                    self.log.write(
                        u"\n\nStation: {station} failed (no SensorML in sml_recs dict).  URL: {ds}"
                        .format(station=station_urn,
                                ds=describe_sensor_url[station_urn].replace(
                                    "&amp;", "&")))
                    print(
                        "Station: {station} failed (no SensorML in sml_recs dict).  URL: {ds}"
                        .format(station=station_urn,
                                ds=describe_sensor_url[station_urn].replace(
                                    "&amp;", "&")))
                    failures.append(station_urn)
                    continue

            if self.sos_type.lower() == 'ndbc':
                # later: add an error check
                sosgc_station_offering = sosgc.contents[
                    'station-' + station_urn.split(':')[-1]]
            else:
                sosgc_station_offering = None

            try:
                ds = IoosDescribeSensor(sml._root)
            except AttributeError:
                self.log.write(
                    u"\nInvalid SensorML passed to IoosDescribeSensor.  Check DescribeSensor request for: {station}, URL: {ds}"
                    .format(station=station_urn,
                            ds=describe_sensor_url[station_urn].replace(
                                "&amp;", "&")))
                print(
                    "Invalid SensorML passed to IoosDescribeSensor.  Check DescribeSensor request for: {station}, URL: {ds}"
                    .format(station=station_urn,
                            ds=describe_sensor_url[station_urn].replace(
                                "&amp;", "&")))

            station = OrderedDict()
            # debug:
            if self.verbose:
                self.log.write(u"\n\nProcessing station: {station}".format(
                    station=station_urn))
                print("Processing station: {station}".format(
                    station=station_urn))
                self.log.write("\n" +
                               etree.tostring(sml._root).decode('utf-8'))

            # assign 'pos' to the GML point location (accommodate 'gml:coordinates' as used by NDBC if gml:pos is not found):
            try:
                pos = testXMLValue(ds.system.location.find(self.nsp('gml:Point/gml:pos'))) \
                    if testXMLValue(ds.system.location.find(self.nsp('gml:Point/gml:pos'))) is not None \
                    else testXMLValue(ds.system.location.find(self.nsp('gml:Point/gml:coordinates')))
                station['lon'] = float(pos.split()[1])
                station['lat'] = float(pos.split()[0])
            except AttributeError as e:
                station['lon'] = None
                station['lat'] = None

            system_el = sml._root.findall(self.nsp('sml:member'))[0].find(
                self.nsp('sml:System'))

            # Parse the DocumentList into a dict storing documents by index value 'name' (may cause index duplication
            # errors but there is not enough information in SensorML for alternatives)
            # Assume that member corresponds to xlink:arcrole="urn:ogc:def:role:webPage"
            documents = system_el.findall(
                self.nsp('sml:documentation/sml:DocumentList/sml:member'))
            documents_dct = {}
            for d in documents:
                document = Documentation(d)
                name = testXMLAttribute(d, "name")
                # url = document.documents[0].url
                documents_dct[name] = document

            # obtain list of contacts (accommodate 'sml:contact' element repetition used by NDBC instead of ContactList):
            contacts = system_el.findall(self.nsp('sml:contact/sml:ContactList/sml:member')) \
                if system_el.findall(self.nsp('sml:contact/sml:ContactList/sml:member')) \
                else system_el.findall(self.nsp('sml:contact'))
            contacts_dct = {}
            for c in contacts:
                contact = Contact(c)
                role = contact.role.split('/')[-1]
                contacts_dct[role] = contact

            # verify a 'publisher' Contact exists (template expects one):
            if "publisher" not in contacts_dct.keys():
                self.log.write(
                    u"\n\nStation: {station} skipped.  No \'http://mmisw.org/ont/ioos/definition/publisher\' Contact role defined in SensorML as required.  Roles defined: [{roles}]"
                    .format(station=station_urn,
                            roles=", ".join(contacts_dct.keys())))
                print(
                    "Station: {station} skipped.  No \'http://mmisw.org/ont/ioos/definition/publisher\' Contact role defined in SensorML as required.  Roles defined: [{roles}]"
                    .format(station=station_urn,
                            roles=", ".join(contacts_dct.keys())))
                failures.append(station_urn)
                continue

            sweQuants = system_el.findall(
                self.nsp('sml:outputs/sml:OutputList/sml:output/swe:Quantity'))
            quant_lst = [
                sweQuant.attrib['definition'] for sweQuant in sweQuants
            ]
            parameter_lst = [sweQuant.split('/')[-1] for sweQuant in quant_lst]

            # attempt to read beginPosition, if available, otherwise use the current date,
            # because ISO requires a date value at this location in the output template:
            beginPosition = testXMLValue(
                system_el.find(
                    self.nsp(
                        'sml:validTime/gml:TimePeriod/gml:beginPosition')))
            try:
                begin_service_date = parser.parse(beginPosition)
            except (AttributeError, TypeError) as e:
                begin_service_date = datetime.now(pytz.utc)

            station['station_urn'] = station_urn
            station['sos_url'] = sos_url_params
            station['describesensor_url'] = describe_sensor_url[station_urn]

            station['shortName'] = ds.shortName
            station['longName'] = ds.longName
            if self.sos_type.lower() == 'ndbc':
                station['wmoID'] = station_urn.split(':')[-1]
            else:
                station['wmoID'] = ds.get_ioos_def('wmoID', 'identifier', ont)
            station['serverName'] = self.server_name

            # Some capabilities-level metadata:
            station['title'] = sosgc.identification.title
            station['abstract'] = sosgc.identification.abstract
            station['keywords'] = sosgc.identification.keywords
            station['begin_service_date'] = begin_service_date

            # Beware that a station can have >1 classifier of the same type
            # This code does not accommodate that possibility
            station['platformType'] = ds.platformType
            station['parentNetwork'] = ds.get_ioos_def('parentNetwork',
                                                       'classifier', ont)
            station['sponsor'] = ds.get_ioos_def('sponsor', 'classifier', ont)

            # store some nested dictionaries in 'station' for appropriate SensorML sources:
            station['contacts_dct'] = contacts_dct
            station['documents_dct'] = documents_dct

            if self.sos_type.lower() == 'ndbc' and sosgc_station_offering is not None:
                station['starting'] = sosgc_station_offering.begin_position
                station['ending'] = sosgc_station_offering.end_position
            else:
                station['starting'] = ds.starting
                station['ending'] = ds.ending

            if self.sos_type.lower() == 'ndbc' and sosgc_station_offering is not None:
                station['variable_uris'] = sosgc_station_offering.observed_properties
                station['variables'] = [
                    var.split('/')[-1]
                    for var in sosgc_station_offering.observed_properties
                ]
                station['parameter_uris'] = ','.join(station['variable_uris'])
                station['parameters'] = ','.join(station['variables'])
            else:
                station['variable_uris'] = ds.variables
                station['variables'] = [
                    var.split('/')[-1] for var in ds.variables
                ]
                station['parameter_uris'] = ','.join(quant_lst)
                station['parameters'] = ','.join(parameter_lst)

            if self.verbose:
                for var in station['variable_uris']:
                    self.log.write(u"\nvariable: {var}".format(var=var))
                    print("variable: {var}".format(var=var))

            # print(sosgc.contents)
            # for id, offering in sosgc.contents.iteritems():
            #    print("sosgc.contents: {item}".format(item=id))

            # parse the 'responseFormat' values advertised for this station's offering
            # in the GetCapabilities document:
            response_formats = []
            for offering_id, offering in sosgc.contents.items():
                if offering.name == station_urn:
                    response_formats = offering.response_formats

            # match responseFormats from SensorML (response_formats) against those passed in --response_formats parameter to
            # populate 'download_formats' list, that is then used to generate GetObservation requests for the template:
            # (default --response_formats values are: 'application/json,application/zip; subtype=x-netcdf' )
            download_formats = [
                response_format for response_format in response_formats
                if response_format in self.response_formats
            ]
            station['response_formats'] = response_formats
            station['download_formats'] = download_formats

            if self.verbose:
                for format in response_formats:
                    self.log.write(
                        u"\nresponseFormat: {format}".format(format=format))
                    print("responseFormat: {format}".format(format=format))
                for format in download_formats:
                    self.log.write(
                        u"\ndownloadFormats: {format}".format(format=format))
                    print("downloadFormats: {format}".format(format=format))

            # calculate event_time using self.getobs_req_hours:
            event_time_formatstr = "{begin:%Y-%m-%dT%H:%M:%S}{utc_code}/{end:%Y-%m-%dT%H:%M:%S}{utc_code}"
            utc_code = 'Z' if self.sos_type.lower() == 'ndbc' else ''
            if station['starting'] is not None and station[
                    'ending'] is not None:
                event_time = event_time_formatstr.format(
                    begin=station['ending'] -
                    timedelta(hours=self.getobs_req_hours),
                    end=station['ending'],
                    utc_code=utc_code)
                if self.verbose:
                    self.log.write(
                        u"\nUsing starting/ending times from SensorML for eventTime"
                    )
                    print(
                        "Using starting/ending times from SensorML for eventTime"
                    )
                    self.log.write(
                        u"\nobservationTimeRange: starting: {start}, ending: {end}"
                        .format(start=station['starting'],
                                end=station['ending']))
                    print(
                        "observationTimeRange: starting: {start}, ending: {end}"
                        .format(start=station['starting'],
                                end=station['ending']))
            else:
                now = datetime.now(pytz.utc)
                then = now - timedelta(hours=self.getobs_req_hours)
                event_time = event_time_formatstr.format(begin=then,
                                                         end=now,
                                                         utc_code=utc_code)
                if self.verbose:
                    self.log.write(
                        u"\nNo 'observationTimeRange' present in SensorML.  Using present time for eventTime: then: {then:%Y-%m-%dT%H:%M:%S%z}, now: {now:%Y-%m-%dT%H:%M:%S%z}"
                        .format(then=then, now=now))
                    print(
                        "No 'observationTimeRange' present in SensorML.  Using present time for eventTime: then: {then:%Y-%m-%dT%H:%M:%S%z}, now: {now:%Y-%m-%dT%H:%M:%S%z}"
                        .format(then=then, now=now))

            if self.verbose:
                self.log.write(u"\neventTime: {time}".format(time=event_time))
                print("eventTime: {time}".format(time=event_time))

            # create a dict to store parameters for valid example GetObservation requests for station:
            getobs_req_dct = {}
            # populate a parameters dictionary for download links for each 'observedProperty' type
            # and secondly for each 'responseFormat' per observedProperty:
            getobs_params_base = {
                'service': 'SOS',
                'request': 'GetObservation',
                'version': '1.0.0',
                'offering': station_urn,
                'eventTime': event_time
            }
            for variable in station['variable_uris']:
                getobs_params = getobs_params_base.copy()
                getobs_params['observedProperty'] = variable
                variable = variable.split('/')[-1]
                for format in download_formats:
                    getobs_params['responseFormat'] = format
                    getobs_request_url_encoded = sos_url + '?' + urlencode(
                        getobs_params)
                    getobs_request_url = unquote(getobs_request_url_encoded)
                    getobs_req_dct[variable + '-' + format] = {
                        'variable': variable,
                        'url': getobs_request_url,
                        'format_type': self.RESPONSE_FORMAT_TYPE_MAP.get(format, format),
                        'format_name': self.RESPONSE_FORMAT_NAME_MAP.get(format, format)
                    }
                    if self.verbose:
                        self.log.write(
                            u"\ngetobs_request_url (var: {variable}): {getobs_request_url}"
                            .format(variable=variable.split("/")[-1],
                                    getobs_request_url=getobs_request_url))
                        print(
                            "getobs_request_url (var: {variable}): {getobs_request_url}"
                            .format(variable=variable.split("/")[-1],
                                    getobs_request_url=getobs_request_url))

            # ToDo: finish adding the 'getobs_req_dct' to the output template
            station['getobs_req_dct'] = getobs_req_dct

            station_recs.append(station)

        # extra debug for failed stations in verbose mode:
        if self.verbose:
            self.log.write(
                u"\n\n\nSOS DescribeSensor request errors recap.  Failed requests:"
            )
            print("SOS DescribeSensor request errors recap.  Failed requests:")
            for station_fail, msg in iteritems(sml_errors):
                self.log.write(
                    u"\n{station} - {msg}.  DescribeSensor URL: {ds}".format(
                        station=station_fail,
                        msg=msg,
                        ds=describe_sensor_url[station_fail].replace(
                            "&amp;", "&")))
                print("{station} - {msg}.  DescribeSensor URL: {ds}".format(
                    station=station_fail,
                    msg=msg,
                    ds=describe_sensor_url[station_fail].replace("&amp;",
                                                                 "&")))
            if failures:
                self.log.write(
                    u"\nStations in 'failures' list (should match DescribeSensor errors):"
                )
                print(
                    "Stations in 'failures' list (should match DescribeSensor errors):"
                )
                for station_fail in failures:
                    self.log.write(u"\n{station}".format(station=station_fail))
                    print("{station}".format(station=station_fail))

        if station_recs:
            stations_df = pd.DataFrame.from_records(station_recs,
                                                    columns=station.keys())
            stations_df.index = stations_df['station_urn']
            return stations_df
        else:
            return None
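
Each entry in getobs_req_dct above is built by urlencoding a SOS 1.0.0 GetObservation parameter dict onto the base SOS URL and then unquoting the result for readability. A minimal sketch of just that step; the parameter names mirror the code above, and the Python 2/3 import fallback is an assumption since the surrounding code targets Python 2:

try:
    from urllib.parse import urlencode, unquote  # Python 3
except ImportError:
    from urllib import urlencode, unquote  # Python 2, as in the surrounding code


def build_getobs_url(sos_url, offering, observed_property, response_format, event_time):
    """Assemble a SOS 1.0.0 GetObservation KVP request URL."""
    params = {
        'service': 'SOS',
        'request': 'GetObservation',
        'version': '1.0.0',
        'offering': offering,
        'observedProperty': observed_property,
        'responseFormat': response_format,
        'eventTime': event_time,
    }
    # unquote() mirrors the code above, which keeps the URLs human-readable in the output
    return unquote(sos_url + '?' + urlencode(params))
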
Example 8
    def process_station(self, uid):
        """ Makes a DescribeSensor request based on a 'uid' parameter being a station procedure """

        GML_NS = "http://www.opengis.net/gml"
        XLINK_NS = "http://www.w3.org/1999/xlink"

        with app.app_context():

            metadata_value = etree.fromstring(
                self.sos.describe_sensor(
                    outputFormat=
                    'text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"',
                    procedure=uid))
            station_ds = IoosDescribeSensor(metadata_value)

            unique_id = station_ds.id
            if unique_id is None:
                app.logger.warn(
                    "Could not get a 'stationID' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/stationID'"
                )
                return

            dataset = db.Dataset.find_one({'uid': unicode(unique_id)})
            if dataset is None:
                dataset = db.Dataset()
                dataset.uid = unicode(unique_id)

            # Find service reference in Dataset.services and remove (to replace it)
            tmp = dataset.services[:]
            for d in tmp:
                if d['service_id'] == self.service.get('_id'):
                    dataset.services.remove(d)

            # Parsing messages
            messages = []

            # NAME
            name = unicode_or_none(station_ds.shortName)
            if name is None:
                messages.append(
                    u"Could not get a 'shortName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/shortName'"
                )

            # DESCRIPTION
            description = unicode_or_none(station_ds.longName)
            if description is None:
                messages.append(
                    u"Could not get a 'longName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/longName'"
                )

            # PLATFORM TYPE
            asset_type = unicode_or_none(station_ds.platformType)
            if asset_type is None:
                messages.append(
                    u"Could not get a 'platformType' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/platformType'"
                )

            # LOCATION is in GML
            gj = None
            loc = station_ds.location
            if loc is not None and loc.tag == "{%s}Point" % GML_NS:
                pos_element = loc.find("{%s}pos" % GML_NS)
                # strip out points
                positions = map(float, testXMLValue(pos_element).split(" "))
                crs = Crs(testXMLAttribute(pos_element, "srsName"))
                if crs.axisorder == "yx":
                    gj = json.loads(
                        geojson.dumps(
                            geojson.Point([positions[1], positions[0]])))
                else:
                    gj = json.loads(
                        geojson.dumps(
                            geojson.Point([positions[0], positions[1]])))
            else:
                messages.append(
                    u"Found an unrecognized child of the sml:location element and did not attempt to process it: %s"
                    % etree.tostring(loc).strip())

            service = {
                # Reset service
                'name': name,
                'description': description,
                'service_type': self.service.get('service_type'),
                'service_id': ObjectId(self.service.get('_id')),
                'data_provider': self.service.get('data_provider'),
                'metadata_type': u'sensorml',
                'metadata_value':
                unicode(etree.tostring(metadata_value)).strip(),
                'messages': map(unicode, messages),
                'keywords': map(unicode, sorted(station_ds.keywords)),
                'variables': map(unicode, sorted(station_ds.variables)),
                'asset_type': asset_type,
                'geojson': gj,
                'updated': datetime.utcnow()
            }

            dataset.services.append(service)
            dataset.updated = datetime.utcnow()
            dataset.save()
            return "Harvested"
Example 9
    def get_stations_df(self, sos_url, station_urns_sel=None):
        """ Returns a GeoDataFrame
        """
        # LATER: ADD ERROR TEST/CATCH AFTER EACH WEB REQUEST
        oFrmt = 'text/xml; subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"'
        params = {
            'service': 'SOS',
            'request': 'GetCapabilities',
            'acceptVersions': '1.0.0'
        }
        sos_url_params = sos_url + '?' + urlencode(params)
        sos_url_params_esc = sos_url_params.replace("&", "&amp;")
        # sos_url_params_quoted = quote(sos_url_params,"/=:")
        # sos_url_params_unquoted = unquote(sos_url_params)

        try:
            sosgc = SensorObservationService(sos_url_params)
        except (ConnectionError, ReadTimeout) as e:
            self.log.write(
                u"Error: unable to connect to SOS service: {url} due to HTTP connection error.\n"
                .format(url=sos_url_params))
            self.log.write(
                u"HTTP connection error: {err}.\n".format(err=e.message))
            sys.exit(
                "Error: unable to connect to SOS service: {url}. \nUnderlying HTTP connection error: {err}"
                .format(url=sos_url_params, err=e.message))

        if station_urns_sel is not None:
            station_urns = station_urns_sel
        else:
            sos_collector = IoosSweSos(sos_url)
            station_urns = [
                urn.name for urn in sos_collector.server.offerings
                if 'network' not in urn.name.split(':')
            ]
            sos_collector.features = station_urns
            sml_lst = sos_collector.metadata(timeout=200)

        station_recs = []
        for station_idx, station_urn in enumerate(station_urns):
            if station_urns_sel is not None:
                sml_str = sosgc.describe_sensor(procedure=station_urn,
                                                outputFormat=oFrmt,
                                                timeout=200)
                sml = SensorML(sml_str)

            else:
                sml = sml_lst[station_idx]

            # debug:
            # if self.verbose:
            #    self.log.write(unicode(etree.tostring(sml._root)))

            ds = IoosDescribeSensor(sml._root)

            pos = testXMLValue(
                ds.system.location.find(self.nsp('gml:Point/gml:pos')))

            system_el = sml._root.findall(self.nsp('sml:member'))[0].find(
                self.nsp('sml:System'))

            # Parse the DocumentList into a dict storing documents by index value 'name' (may cause index duplication
            # errors but there is not enough information in SensorML for alternatives)
            # Assume that member corresponds to xlink:arcrole="urn:ogc:def:role:webPage"
            documents = system_el.findall(
                self.nsp('sml:documentation/sml:DocumentList/sml:member'))
            documents_dct = {}
            for d in documents:
                document = Documentation(d)
                name = testXMLAttribute(d, "name")
                # url = document.documents[0].url
                documents_dct[name] = document

            contacts = system_el.findall(
                self.nsp('sml:contact/sml:ContactList/sml:member'))
            contacts_dct = {}
            for c in contacts:
                contact = Contact(c)
                role = contact.role.split('/')[-1]
                contacts_dct[role] = contact

            sweQuants = system_el.findall(
                self.nsp('sml:outputs/sml:OutputList/sml:output/swe:Quantity'))
            quant_lst = [
                sweQuant.attrib['definition'] for sweQuant in sweQuants
            ]
            parameter_lst = [sweQuant.split('/')[-1] for sweQuant in quant_lst]

            # attempt to read beginPosition, if available:
            beginPosition = testXMLValue(
                system_el.find(
                    self.nsp(
                        'sml:validTime/gml:TimePeriod/gml:beginPosition')))
            try:
                begin_service_date = parser.parse(beginPosition)
            except AttributeError as e:
                begin_service_date = None

            station = OrderedDict()
            station['station_urn'] = station_urn
            station['sos_url'] = sos_url_params_esc
            station['lon'] = float(pos.split()[1])
            station['lat'] = float(pos.split()[0])

            station['shortName'] = ds.shortName
            station['longName'] = ds.longName
            station['wmoID'] = ds.get_ioos_def('wmoID', 'identifier', ont)
            station['serverName'] = self.server_name

            # Some capabilities-level metadata:
            station['title'] = sosgc.identification.title
            station['abstract'] = sosgc.identification.abstract
            station['keywords'] = sosgc.identification.keywords
            station['begin_service_date'] = begin_service_date

            # Beware that a station can have >1 classifier of the same type
            # This code does not accommodate that possibility
            station['platformType'] = ds.platformType
            station['parentNetwork'] = ds.get_ioos_def('parentNetwork',
                                                       'classifier', ont)
            station['sponsor'] = ds.get_ioos_def('sponsor', 'classifier', ont)

            # store some nested dictionaries in 'station' for appropriate SensorML sources:
            station['contacts_dct'] = contacts_dct
            station['documents_dct'] = documents_dct

            # MW: the 'operator_' and 'publisher_' attributes can be removed because they are not used
            # in the template code currently, in favor of 'contacts_dct'
            # station['operatorSector'] = ds.get_ioos_def('operatorSector', 'classifier', ont)
            # station['operator_org'] = contacts_dct['operator'].organization
            # station['operator_country'] = contacts_dct['operator'].country
            # station['operator_url'] = contacts_dct['operator'].url
            # station['operator_email'] = contacts_dct['operator'].email

            # station['publisher'] = ds.get_ioos_def('publisher', 'classifier', ont)
            # station['publisher_org'] = contacts_dct['publisher'].organization
            # station['publisher_url'] = contacts_dct['publisher'].url
            # station_dct['publisher_email'] = contacts_dct['publisher'].electronicMailAddress

            station['starting'] = ds.starting
            station['ending'] = ds.ending
            # station['starting_isostr'] = datetime.isoformat(ds.starting)
            # station['ending_isostr'] = datetime.isoformat(ds.ending)

            station['parameter_uris'] = ','.join(quant_lst)
            station['parameters'] = ','.join(parameter_lst)
            station['variables'] = [var.split('/')[-1] for var in ds.variables]

            # debug:
            if self.verbose:
                self.log.write(u"\nProcessing station: {station}\n".format(
                    station=station_urn))
                print("\nProcessing station: {station}".format(
                    station=station_urn))
                for var in ds.variables:
                    self.log.write(u"variable: {var}\n".format(var=var))
                    print("variable: {var}".format(var=var))

            # print(sosgc.contents)
            # for id, offering in sosgc.contents.iteritems():
            #    print("sosgc.contents: {item}".format(item=id))

            # parse 'responseFormat' values and populate list:
            response_formats = []
            for offering_id, offering in sosgc.contents.items():
                if offering.name == station_urn:
                    response_formats = offering.response_formats

            # subset responseFormats (response_formats) for download links matching those passed in --response_formats parameter
            # (or 'application/json,application/zip; subtype=x-netcdf' by default):
            download_formats = [
                response_format for response_format in response_formats
                if response_format in self.response_formats
            ]
            station['response_formats'] = response_formats
            station['download_formats'] = download_formats

            if self.verbose:
                for format in response_formats:
                    self.log.write(
                        u"responseFormat: {format}\n".format(format=format))
                    print("responseFormat: {format}".format(format=format))
                for format in download_formats:
                    self.log.write(
                        u"downloadFormats: {format}\n".format(format=format))
                    print("downloadFormats: {format}".format(format=format))

            # calculate event_time using self.getobs_req_hours:
            if ds.starting is not None and ds.ending is not None:
                event_time = "{begin:%Y-%m-%dT%H:%M:%S}/{end:%Y-%m-%dT%H:%M:%S}\n".format(
                    begin=ds.ending - timedelta(hours=self.getobs_req_hours),
                    end=ds.ending)
            else:
                now = datetime.now(pytz.utc)
                then = now - timedelta(hours=self.getobs_req_hours)
                event_time = "{begin:%Y-%m-%dT%H:%M:%S}/{end:%Y-%m-%dT%H:%M:%S}\n".format(
                    begin=then, end=now)
                if self.verbose:
                    self.log.write(
                        u"then: {then:%Y-%m-%dT%H:%M:%S%z}, now: {now:%Y-%m-%dT%H:%M:%S%z}\n"
                        .format(then=then, now=now))
                    print(
                        "then: {then:%Y-%m-%dT%H:%M:%S%z}, now: {now:%Y-%m-%dT%H:%M:%S%z}"
                        .format(then=then, now=now))

            if self.verbose:
                self.log.write(u"eventTime: {time}\n".format(time=event_time))
                print("eventTime: {time}".format(time=event_time))

            # create a dict to store parameters for valid example GetObservation requests for station:
            getobs_req_dct = {}
            # populate a parameters dictionary for download links for each 'observedProperty' type and secondly for each 'responseFormat' per observedProperty:
            getobs_params_base = {
                'service': 'SOS',
                'request': 'GetObservation',
                'version': '1.0.0',
                'offering': station_urn,
                'eventTime': event_time
            }
            for variable in ds.variables:
                getobs_params = getobs_params_base.copy()
                getobs_params['observedProperty'] = variable
                variable = variable.split('/')[-1]
                for format in download_formats:
                    getobs_params['responseFormat'] = format
                    getobs_request_url_encoded = sos_url + '?' + urlencode(
                        getobs_params)
                    getobs_request_url = unquote(getobs_request_url_encoded)
                    getobs_request_url_esc = getobs_request_url.replace(
                        "&", "&amp;")
                    getobs_req_dct[variable + '-' + format] = {
                        'variable': variable,
                        'url': getobs_request_url_esc,
                        'format_type': self.RESPONSE_FORMAT_TYPE_MAP[format],
                        'format_name': self.RESPONSE_FORMAT_NAME_MAP[format]
                    }
                    if self.verbose:
                        self.log.write(
                            u"getobs_request_url (var: {variable}): {getobs_request_url}\ngetobs_request_url_esc (var: {variable}): {getobs_request_url_esc}\n"
                            .format(
                                variable=variable.split("/")[-1],
                                getobs_request_url=getobs_request_url,
                                getobs_request_url_esc=getobs_request_url_esc))
                        print(
                            "getobs_request_url (var: {variable}): {getobs_request_url}\ngetobs_request_url_esc (var: {variable}): {getobs_request_url_esc}"
                            .format(
                                variable=variable.split("/")[-1],
                                getobs_request_url=getobs_request_url,
                                getobs_request_url_esc=getobs_request_url_esc))

            # ToDo: finish adding the 'getobs_req_dct' to the output template
            station['getobs_req_dct'] = getobs_req_dct

            station_recs.append(station)

        stations_df = pd.DataFrame.from_records(station_recs,
                                                columns=station.keys())
        stations_df.index = stations_df['station_urn']

        return stations_df
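
Both harvesters derive eventTime the same way: a window of getobs_req_hours ending at the station's advertised ending time, or at the current UTC time when no valid period is available. A small standalone sketch of that calculation; the default window length and the utc_code switch are illustrative assumptions:

from datetime import datetime, timedelta

import pytz


def event_time_window(ending=None, getobs_req_hours=24, utc_code=''):
    """Return a 'begin/end' eventTime string covering the last getobs_req_hours."""
    end = ending if ending is not None else datetime.now(pytz.utc)
    begin = end - timedelta(hours=getobs_req_hours)
    fmt = "{begin:%Y-%m-%dT%H:%M:%S}{utc}/{end:%Y-%m-%dT%H:%M:%S}{utc}"
    return fmt.format(begin=begin, end=end, utc=utc_code)

# e.g. event_time_window(getobs_req_hours=12, utc_code='Z')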