def _loadInventory(self):
    """Read the complete inventory required by the StationXML exporter
    from the database into ``self._inv``."""
    Logging.debug("loading inventory")
    reader = DataModel.DatabaseReader(self.database())
    self._inv = DataModel.Inventory()

    # networks together with their stations
    nStations = 0
    for idx in xrange(reader.loadNetworks(self._inv)):
        nStations += reader.load(self._inv.network(idx))
    Logging.debug("loaded %i stations from %i networks" % (
                  nStations, self._inv.networkCount()))

    # sensors -- calibrations are skipped, the StationXML exporter does
    # not need them
    Logging.debug("loaded %i sensors" % reader.loadSensors(self._inv))

    # dataloggers and their decimations -- calibrations skipped as well
    nDecimations = 0
    for idx in xrange(reader.loadDataloggers(self._inv)):
        nDecimations += reader.loadDecimations(self._inv.datalogger(idx))
    Logging.debug("loaded %i decimations from %i dataloggers" % (
                  nDecimations, self._inv.dataloggerCount()))

    # responses of all supported flavours
    nPAZ = reader.loadResponsePAZs(self._inv)
    nFIR = reader.loadResponseFIRs(self._inv)
    nPoly = reader.loadResponsePolynomials(self._inv)
    Logging.debug(
        "loaded %i responses (PAZ: %i, FIR: %i, Poly: %i)" % (
        nPAZ + nFIR + nPoly, nPAZ, nFIR, nPoly)
    )
    Logging.info("inventory loaded")
def resumeProducing(self):
    """Twisted pull-producer callback: stream the next miniSEED record
    to the client.

    Sends the response headers before the first record, renders a
    NO_CONTENT error page if no record at all could be read, and
    finishes the request once the input is exhausted.
    """
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception as e:  # 'as' form: valid in Py2.6+ and Py3
        Logging.warning("%s" % str(e))

    if self.written == 0:
        # read first record to test if any data exists at all
        if not rec:
            msg = "no waveform data found"
            data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                        self.ro)
            if data:
                self.req.write(data)
            self.req.unregisterProducer()
            self.req.finish()
            return

        self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
        self.req.setHeader('Content-Disposition', "attachment; " \
                           "filename=%s" % self.fileName)

    if not rec:
        # input exhausted: log the transfer summary and close the request
        self.req.unregisterProducer()
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        self.req.finish()
        return

    data = rec.raw().str()
    self.req.write(data)
    self.written += len(data)
def _finish(self):
    """Close the request normally: log the transfer (or render a
    NO_CONTENT error page when nothing was written), report the volume
    status to all trackers and finish the twisted request."""
    if self.stopped:
        return

    if self.written:
        Logging.debug("%s: returned %i bytes of mseed data"
                      % (self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        status, size = "OK", self.written
    else:
        msg = "no waveform data found"
        errorpage = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                         VERSION, self.ro)
        if errorpage:
            self.req.write(errorpage)
        status, size = "NODATA", 0

    for tracker in self.trackerList:
        tracker.volume_status("fdsnws", status, size, "")
        tracker.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
def _processRequest(self, req, ro, dbq, exp):
    """Collect the requested events and dispatch to the XML exporter or
    the plain-text renderer."""
    if req._disconnected:
        return False
    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        # explicit list of event IDs was supplied
        for eID in ro.eventIDs:
            event = DataModel.Event.Cast(dbq.getEventByPublicID(eID))
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        utils.writeTS(req,
                      HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        contentType = 'text/plain'
    else:
        contentType = 'application/xml'
    req.setHeader('Content-Type', contentType)

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def _loadInventory(self):
    """Populate ``self._inv`` with networks, stations, sensors,
    dataloggers and responses read from the database."""
    Logging.debug("loading inventory")
    dbr = DataModel.DatabaseReader(self.database())
    inv = DataModel.Inventory()
    self._inv = inv

    # networks and stations
    staCount = 0
    for i in xrange(dbr.loadNetworks(inv)):
        staCount += dbr.load(inv.network(i))
    Logging.debug("loaded %i stations from %i networks" % (
                  staCount, inv.networkCount()))

    # sensors; calibrations skipped (not needed by StationXML exporter)
    Logging.debug("loaded %i sensors" % dbr.loadSensors(inv))

    # dataloggers plus decimations; calibrations skipped as above
    deciCount = 0
    for i in xrange(dbr.loadDataloggers(inv)):
        deciCount += dbr.loadDecimations(inv.datalogger(i))
    Logging.debug("loaded %i decimations from %i dataloggers" % (
                  deciCount, inv.dataloggerCount()))

    # responses
    resPAZCount = dbr.loadResponsePAZs(inv)
    resFIRCount = dbr.loadResponseFIRs(inv)
    resPolCount = dbr.loadResponsePolynomials(inv)
    resCount = resPAZCount + resFIRCount + resPolCount
    Logging.debug("loaded %i responses (PAZ: %i, FIR: %i, Poly: %i)" % (
                  resCount, resPAZCount, resFIRCount, resPolCount))
    Logging.info("inventory loaded")
def charstar(string):
    """Return *string* converted to the type the seiscomp SWIG API
    accepts for C++ char*/std::string arguments.

    Whether the bindings want bytes or unicode depends on the seiscomp
    and Python versions and on whether SWIG_PYTHON_2_UNICODE was used
    when generating them; the seiscomp3 backwards-compat wrapper does
    not hide the difference.  There is no simple way to introspect the
    expected type, so the first call probes it by attempting to log a
    bytes message.
    """
    global _charstar_is_bytes
    if _charstar_is_bytes is None:
        # First call: probe which type the bindings accept.
        try:
            Logging.debug(b"Detected SWIG char* type as bytes")
            _charstar_is_bytes = True
        except TypeError:
            Logging.debug(u"Detecting SWIG char* type as unicode")
            _charstar_is_bytes = False

    if _charstar_is_bytes:
        if isinstance(string, bytes):
            return string
        return string.encode('utf-8')
    if isinstance(string, bytes):
        return string.decode('utf-8')
    return string
def _processRequest(self, req, ro, dbq, exp):
    """Collect the requested events, applying the configured event-type
    and evaluation-mode filters, then dispatch to the exporter or the
    plain-text renderer.

    Returns True when a response (including the "no matching events"
    page) was written, False when the client disconnected or no events
    were found via _findEvents.
    """
    if req._disconnected:
        return False
    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        # explicit event IDs requested: load and filter each one
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue

            # event type white/black list filter; eType stays -1 when
            # the event has no type set (e.type() raises ValueError)
            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                eType = -1
                try:
                    eType = e.type()
                except ValueError:
                    pass
                if self._eventTypeWhitelist and \
                   not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                   eType in self._eventTypeBlacklist:
                    continue

            # evaluation-mode filter on the preferred origin; events
            # whose origin lacks an evaluation mode are skipped too
            if self._evaluationMode is not None:
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                       o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueError:
                    continue

            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return True

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def onRequestServed(success, req):
    """Completion callback: log the outcome and finish *req* on the
    reactor thread, unless the client already disconnected."""
    if req._disconnected:
        Logging.debug("Request aborted")
        return
    outcome = "successfully served" if success else "failed"
    Logging.debug("Request %s" % outcome)
    reactor.callFromThread(req.finish)
def getResourceFor(self, request):
    """Log the incoming request and attach server identification and
    CORS headers before the standard twisted resource lookup."""
    Logging.debug("request (%s): %s" % (request.getClientIP(),
                                        request.uri))
    headers = (
        ('Server', "SeisComP3-FDSNWS/%s" % VERSION),
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers', 'Authorization'),
        ('Access-Control-Expose-Headers', 'WWW-Authenticate'),
    )
    for name, value in headers:
        request.setHeader(name, value)
    return server.Site.getResourceFor(self, request)
def getResourceFor(self, request):
    """Standard twisted resource lookup, extended with request logging
    plus server identification and CORS response headers."""
    Logging.debug("request (%s): %s"
                  % (request.getClientIP(), request.uri))
    setHeader = request.setHeader
    setHeader('Server', "SeisComP3-FDSNWS/%s" % VERSION)
    setHeader('Access-Control-Allow-Origin', '*')
    setHeader('Access-Control-Allow-Headers', 'Authorization')
    setHeader('Access-Control-Expose-Headers', 'WWW-Authenticate')
    return server.Site.getResourceFor(self, request)
def _processRequest(self, req, ro, dbq, exp):
    """Collect the requested events, applying the configured event-type
    (by type *name*) and evaluation-mode filters, then dispatch to the
    exporter or the plain-text renderer.

    Returns True when a response was written (including the "no
    matching events" page), False when the client disconnected or no
    events were found via _findEvents.
    """
    if req._disconnected:
        return False
    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        # explicit event IDs requested: load and filter each one
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue

            # event type white/black list filter, matched on the type
            # name; eType stays None when the event has no type set
            # (ValueException raised by e.type())
            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                eType = None
                try:
                    eType = DataModel.EEventTypeNames_name(e.type())
                except ValueException:
                    pass
                if self._eventTypeWhitelist and \
                   not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                   eType in self._eventTypeBlacklist:
                    continue

            # evaluation-mode filter on the preferred origin; events
            # whose origin lacks an evaluation mode are skipped too
            if self._evaluationMode is not None:
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                       o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueException:
                    continue

            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def emit(self, record):
    """Forward a python ``logging`` record to the matching seiscomp
    Logging channel (unknown levels go to the error channel)."""
    text = charstar(self.format(record))
    dispatch = {
        'DEBUG': Logging.debug,
        'INFO': Logging.info,
        'WARNING': Logging.warning,
    }
    dispatch.get(record.levelname, Logging.error)(text)
def run(self):
    """Walk the whole inventory and apply the attribute rules to every
    network, station, sensor location, stream, sensor and datalogger.

    When no output file is configured the modifications are propagated
    as notifiers.  Returns False when rules or inventory are missing.
    """
    rules = self.rules
    iv = Client.Inventory.Instance().inventory()

    if not rules:
        return False

    if not iv:
        return False

    Logging.debug("Loaded %d networks" % iv.networkCount())

    # without an output file send notifiers instead of writing XML
    if self.outputFile is None:
        DataModel.Notifier.Enable()
        self.setInterpretNotifierEnabled(True)

    for net in self._loop(iv.network, iv.networkCount()):
        (ncode, nstart, nend) = self._collect(net)
        key = rules.findKey(ncode, nstart, nend)
        # networks without a matching rule key are left untouched
        if not key:
            continue
        att = rules.getNetworkAttributes(key)
        self._modifyInventory("network", net, att)
        Logging.info("%s %s" % (ncode, att))

        for sta in self._loop(net.station, net.stationCount()):
            (scode, sstart, send) = self._collect(sta)
            att = rules.getStationAttributes(key, ncode, scode, None,
                                             None, sstart, send)
            self._modifyInventory("station", sta, att)
            if att:
                Logging.info(" %s %s" % (scode, att))

            for loc in self._loop(sta.sensorLocation,
                                  sta.sensorLocationCount()):
                (lcode, lstart, lend) = self._collect(loc)
                att = rules.getStationAttributes(key, ncode, scode,
                                                 lcode, None, lstart,
                                                 lend)
                self._modifyInventory("location", loc, att)
                if att:
                    Logging.info(" %s %s" % (lcode, att))

                for cha in self._loop(loc.stream, loc.streamCount()):
                    (ccode, cstart, cend) = self._collect(cha)
                    att = rules.getStationAttributes(key, ncode, scode,
                                                     lcode, ccode,
                                                     cstart, cend)
                    self._modifyInventory("channel", cha, att)
                    if att:
                        Logging.info(" %s %s" % (ccode, att))

    # instruments: sensors ("Se") and dataloggers ("Dl")
    for sensor in self._loop(iv.sensor, iv.sensorCount()):
        att = rules.getInstrumentsAttributes(sensor.name(), "Se")
        self._modifyInventory("sensor", sensor, att)

    for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
        att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
        self._modifyInventory("datalogger", datalogger, att)

    return True
def run(self):
    """Apply the configured attribute rules across the full inventory
    tree (network -> station -> sensor location -> stream) and to all
    sensors and dataloggers.

    Without an output file, changes are emitted as notifiers.
    Returns False when either the rules or the inventory are missing.
    """
    rules = self.rules
    iv = Client.Inventory.Instance().inventory()

    if not rules:
        return False

    if not iv:
        return False

    Logging.debug("Loaded %d networks" % iv.networkCount())

    # no output file -> propagate modifications via notifier messages
    if self.outputFile is None:
        DataModel.Notifier.Enable()
        self.setInterpretNotifierEnabled(True)

    for net in self._loop(iv.network, iv.networkCount()):
        (ncode, nstart, nend) = self._collect(net)
        key = rules.findKey(ncode, nstart, nend)
        # skip networks that no rule key applies to
        if not key:
            continue
        att = rules.getNetworkAttributes(key)
        self._modifyInventory("network", net, att)
        Logging.info("%s %s" % (ncode, att))

        for sta in self._loop(net.station, net.stationCount()):
            (scode, sstart, send) = self._collect(sta)
            att = rules.getStationAttributes(key, ncode, scode, None,
                                             None, sstart, send)
            self._modifyInventory("station", sta, att)
            if att:
                Logging.info(" %s %s" % (scode, att))

            for loc in self._loop(sta.sensorLocation,
                                  sta.sensorLocationCount()):
                (lcode, lstart, lend) = self._collect(loc)
                att = rules.getStationAttributes(key, ncode, scode,
                                                 lcode, None, lstart,
                                                 lend)
                self._modifyInventory("location", loc, att)
                if att:
                    Logging.info(" %s %s" % (lcode, att))

                for cha in self._loop(loc.stream, loc.streamCount()):
                    (ccode, cstart, cend) = self._collect(cha)
                    att = rules.getStationAttributes(
                        key, ncode, scode, lcode, ccode, cstart, cend)
                    self._modifyInventory("channel", cha, att)
                    if att:
                        Logging.info(" %s %s" % (ccode, att))

    # instruments: "Se" selects sensor rules, "Dl" datalogger rules
    for sensor in self._loop(iv.sensor, iv.sensorCount()):
        att = rules.getInstrumentsAttributes(sensor.name(), "Se")
        self._modifyInventory("sensor", sensor, att)

    for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
        att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
        self._modifyInventory("datalogger", datalogger, att)

    return True
def stopProducing(self):
    """Abort callback: mark the producer stopped, record the partial
    transfer in logs and trackers, then close the request."""
    self.stopped = True
    req, ro, written = self.req, self.ro, self.written

    Logging.debug("%s: returned %i bytes of mseed data (not completed)"
                  % (ro.service, written))
    utils.accessLog(req, ro, http.OK, written, "not completed")

    for t in self.trackerList:
        t.volume_status("fdsnws", "ERROR", written, "")
        t.request_status("END", "")

    req.unregisterProducer()
    req.finish()
class _WaveformProducer:
    """Twisted pull producer streaming raw miniSEED records from a
    record stream to the HTTP request, with optional request tracking."""

    def __init__(self, req, ro, rs, fileName, tracker):
        self.req = req
        self.ro = ro
        self.rs = rs  # keep a reference to avoid crash
        self.rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
        self.fileName = fileName
        self.written = 0  # total payload bytes sent so far
        self.tracker = tracker

    def resumeProducing(self):
        """Send the next record; on the first call decide between the
        miniSEED response headers and a NO_CONTENT error page, and close
        the request (reporting to the tracker) once input is exhausted."""
        rec = None
        try:
            rec = self.rsInput.next()
        except Exception as e:  # 'as' form: valid in Py2.6+ and Py3
            Logging.warning("%s" % str(e))

        if self.written == 0:
            # read first record to test if any data exists at all
            if not rec:
                msg = "no waveform data found"
                data = HTTP.renderErrorPage(self.req, http.NO_CONTENT,
                                            msg, self.ro)
                if data:
                    self.req.write(data)
                self.req.unregisterProducer()
                self.req.finish()
                if self.tracker:
                    self.tracker.volume_status("fdsnws", "NODATA", 0, "")
                    self.tracker.request_status("END", "")
                return

            self.req.setHeader('Content-Type',
                               'application/vnd.fdsn.mseed')
            self.req.setHeader('Content-Disposition', "attachment; " \
                               "filename=%s" % self.fileName)

        if not rec:
            # input exhausted: log summary, notify tracker, finish
            self.req.unregisterProducer()
            Logging.debug("%s: returned %i bytes of mseed data" %
                          (self.ro.service, self.written))
            utils.accessLog(self.req, self.ro, http.OK, self.written,
                            None)
            self.req.finish()
            if self.tracker:
                self.tracker.volume_status("fdsnws", "OK", self.written,
                                           "")
                self.tracker.request_status("END", "")
            return

        data = rec.raw().str()
        self.req.write(data)
        self.written += len(data)
def send_notifiers(self, group):
    """Send all pending notifiers to messaging *group* in batches of at
    most 100 notifiers per message.

    Returns 0 when there is nothing to send; otherwise the number of
    notifiers attached to the last (partial) message.
    NOTE(review): returning the leftover batch count rather than the
    total ('sent + mcount') looks suspicious -- verify against callers
    before changing.
    """
    Nsize = DataModel.Notifier.Size()
    if Nsize > 0:
        Logging.info("trying to apply %d change%s" % (
                     Nsize, "s" if Nsize != 1 else ""))
    else:
        Logging.info("no changes to apply")
        return 0

    Nmsg = DataModel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()

    maxmsg = 100  # maximum notifiers per message
    sent = 0      # notifiers already flushed to the messaging system
    mcount = 0    # notifiers attached to the current message

    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier_Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    # flush a full batch and report progress
                    sent += mcount
                    Logging.debug("sending message (%5.1f %%)" % (
                                  sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync()
                it.next()
        except:
            # bare except: iterator exhaustion (and any other failure)
            # silently ends the loop -- deliberate best-effort behaviour
            pass
    finally:
        # flush the remaining partial message, if any
        if msg.size():
            Logging.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync()

    Logging.info("done")
    return mcount
def send_notifiers(self, group):
    """Flush all pending notifiers to messaging *group*, batching at
    most 100 notifiers into each NotifierMessage.

    Returns 0 when nothing was pending, otherwise the size of the final
    (partial) batch.
    NOTE(review): 'return mcount' yields the leftover batch size, not
    the total number sent -- possibly 'sent + mcount' was intended;
    confirm with callers before touching.
    """
    Nsize = DataModel.Notifier.Size()
    if Nsize > 0:
        Logging.info("trying to apply %d change%s" % (
                     Nsize, "s" if Nsize != 1 else ""))
    else:
        Logging.info("no changes to apply")
        return 0

    Nmsg = DataModel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()

    maxmsg = 100  # batch size limit
    sent = 0      # notifiers flushed so far
    mcount = 0    # notifiers in the current batch

    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier_Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    # full batch: send it and log the progress
                    sent += mcount
                    Logging.debug("sending message (%5.1f %%)" % (
                                  sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync()
                it.next()
        except:
            # bare except: terminates on iterator exhaustion (or any
            # other error) without reporting -- intentional best-effort
            pass
    finally:
        # ship whatever remains in the last partial batch
        if msg.size():
            Logging.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync()

    Logging.info("done")
    return mcount
def _finish(self):
    """Finish the request: log the transfer or render the NO_CONTENT
    page, report to the tracker, and close the twisted request."""
    req, ro = self.req, self.ro

    if self.written:
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      ro.service, self.written))
        utils.accessLog(req, ro, http.OK, self.written, None)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "OK", self.written, "")
            self.tracker.request_status("END", "")
    else:
        msg = "no waveform data found"
        HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "NODATA", 0, "")
            self.tracker.request_status("END", "")

    req.unregisterProducer()
    req.finish()
def run(self):
    """Run in batch mode when an input file was given; otherwise stream
    the input into a temporary XML file, process it and clean up.

    Returns the status of the selected processing mode.
    """
    if self._xmlFile:
        Logging.debug("running in batch mode")
        Logging.debug("input file is %s" % self._xmlFile)
        return self._runBatchMode()

    # tempfile.mktemp() is race-prone: another process may create the
    # file between name generation and first use.  mkstemp() creates
    # the file atomically; only the name is needed here, so the file
    # descriptor is closed right away.
    fd, self._xmlFile = tempfile.mkstemp(".xml", dir=self._tmpDir)
    os.close(fd)
    Logging.debug("running in stream mode")
    Logging.debug("temp file is %s" % self._xmlFile)
    status = self._runStreamMode()
    if os.path.exists(self._xmlFile):
        os.unlink(self._xmlFile)
    return status
def run(self):
    """Dispatch to batch mode (input file given) or stream mode, where
    the stream is buffered in a temporary XML file that is removed
    afterwards.  Returns the processing status.
    """
    if self._xmlFile:
        Logging.debug("running in batch mode")
        Logging.debug("input file is %s" % self._xmlFile)
        return self._runBatchMode()

    # Use mkstemp() instead of the deprecated, race-prone mktemp():
    # the file is created atomically, and the descriptor is closed
    # immediately since only the path is used.
    fd, self._xmlFile = tempfile.mkstemp(".xml", dir=self._tmpDir)
    os.close(fd)
    Logging.debug("running in stream mode")
    Logging.debug("temp file is %s" % self._xmlFile)
    status = self._runStreamMode()
    if os.path.exists(self._xmlFile):
        os.unlink(self._xmlFile)
    return status
def onRequestServed(success, req):
    """Completion callback: log the result and finish *req* from the
    reactor thread; do nothing if the client already disconnected."""
    if req._disconnected:
        Logging.debug("request aborted")
        return
    Logging.debug("request successfully served" if success
                  else "request failed")
    reactor.callFromThread(req.finish)
def onFinish(result, req):
    """Deferred completion callback: log the outcome and finish *req*
    on the reactor thread.  A cancelled request is left unfinished."""
    Logging.debug("finish value = %s" % str(result))

    if not isinstance(result, Failure):
        Logging.debug("request successfully served" if result
                      else "request failed")
    else:
        if isinstance(result.value, defer.CancelledError):
            Logging.error("request canceled")
            return
        Logging.error("%s %s" % (
            result.getErrorMessage(),
            traceback.format_tb(result.getTracebackObject())))

    reactor.callFromThread(req.finish)
def onFinish(result, req):
    """Deferred callback logging the request outcome; finishes *req*
    from the reactor thread except when the request was cancelled."""
    Logging.debug("finish value = %s" % str(result))

    finish = True
    if isinstance(result, Failure):
        if isinstance(result.value, defer.CancelledError):
            Logging.error("request canceled")
            finish = False
        else:
            tb = traceback.format_tb(result.getTracebackObject())
            Logging.error("%s %s" % (result.getErrorMessage(), tb))
    elif result:
        Logging.debug("request successfully served")
    else:
        Logging.debug("request failed")

    if finish:
        reactor.callFromThread(req.finish)
def init(self):
    """Initialize the application: parse the requested time window and
    optional output options from the command line.

    Returns False (aborting startup) on an unparsable time value.
    """
    if not Client.Application.init(self):
        return False

    # --begin: start of time window, defaults to 1900
    try:
        start = self.commandline().optionString("begin")
    except:
        # presumably raised when the option is absent -- TODO confirm
        start = "1900-01-01T00:00:00Z"
    self._startTime = _parseTime(start)
    if self._startTime is None:
        Logging.error("Wrong 'begin' format '%s'" % start)
        return False
    Logging.debug("Setting start to %s" %
                  self._startTime.toString("%FT%TZ"))

    # --end: end of time window, defaults to 2500
    try:
        end = self.commandline().optionString("end")
    except:
        end = "2500-01-01T00:00:00Z"
    self._endTime = _parseTime(end)
    if self._endTime is None:
        Logging.error("Wrong 'end' format '%s'" % end)
        return False
    Logging.debug("Setting end to %s" %
                  self._endTime.toString("%FT%TZ"))

    # --delimiter: record separator for the output, newline by default
    try:
        self._delimiter = self.commandline().optionString("delimiter")
    except:
        self._delimiter = "\n"

    # --modified-after: optional filter; silently ignored when absent
    try:
        modifiedAfter = self.commandline().optionString("modified-after")
        self._modifiedAfterTime = _parseTime(modifiedAfter)
        if self._modifiedAfterTime is None:
            Logging.error("Wrong 'modified-after' format '%s'" %
                          modifiedAfter)
            return False
        Logging.debug("Setting 'modified-after' time to %s" %
                      self._modifiedAfterTime.toString("%FT%TZ"))
    except:
        pass

    return True
def init(self):
    """Application start-up: read the time window and optional output
    parameters from the command line.

    Returns False when a supplied time value cannot be parsed.
    """
    if not Client.Application.init(self):
        return False

    # begin time, default 1900-01-01
    try:
        start = self.commandline().optionString("begin")
    except:
        # assumed to fire when --begin was not given -- TODO confirm
        start = "1900-01-01T00:00:00Z"
    self._startTime = _parseTime(start)
    if self._startTime is None:
        Logging.error("Wrong 'begin' format '%s'" % start)
        return False
    Logging.debug("Setting start to %s" %
                  self._startTime.toString("%FT%TZ"))

    # end time, default 2500-01-01
    try:
        end = self.commandline().optionString("end")
    except:
        end = "2500-01-01T00:00:00Z"
    self._endTime = _parseTime(end)
    if self._endTime is None:
        Logging.error("Wrong 'end' format '%s'" % end)
        return False
    Logging.debug("Setting end to %s" %
                  self._endTime.toString("%FT%TZ"))

    # output delimiter, newline by default
    try:
        self._delimiter = self.commandline().optionString("delimiter")
    except:
        self._delimiter = "\n"

    # optional modified-after filter; skipped entirely when absent
    try:
        modifiedAfter = self.commandline().optionString("modified-after")
        self._modifiedAfterTime = _parseTime(modifiedAfter)
        if self._modifiedAfterTime is None:
            Logging.error("Wrong 'modified-after' format '%s'" %
                          modifiedAfter)
            return False
        Logging.debug("Setting 'modified-after' time to %s" %
                      self._modifiedAfterTime.toString("%FT%TZ"))
    except:
        pass

    return True
def _processRequestText(self, req, ro, dbq, ep):
    """Render the collected events as FDSNWS pipe-separated text, one
    line per event, streamed directly to *req*.

    Returns False if the client disconnects mid-stream, True otherwise.
    """
    lineCount = 0

    # header line of the text format
    line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
           "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
           "EventLocationName\n"
    df = "%FT%T.%f"  # time format for origin times
    utils.writeTS(req, line)
    byteCount = len(line)

    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # query for preferred origin; events without one are skipped
        obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                            e.preferredOriginID())
        o = DataModel.Origin.Cast(obj)
        if o is None:
            Logging.warning(
                "preferred origin of event '%s' not found: %s" % (
                eID, e.preferredOriginID()))
            continue

        # depth (optional attribute -> empty string when unset)
        try:
            depth = str(o.depth().value())
        except ValueException:
            depth = ''

        # author, suppressed entirely when author hiding is configured
        if self._hideAuthor:
            author = ''
        else:
            try:
                author = o.creationInfo().author()
            except ValueException:
                author = ''

        # contributor
        try:
            contrib = e.creationInfo().agencyID()
        except ValueException:
            contrib = ''

        # query for preferred magnitude (if any)
        mType, mVal, mAuthor = '', '', ''
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                mType = m.type()
                mVal = str(m.magnitude().value())
                if self._hideAuthor:
                    mAuthor = ''
                else:
                    try:
                        mAuthor = m.creationInfo().author()
                    except ValueException:
                        pass

        # event description: first REGION_NAME entry wins
        dbq.loadEventDescriptions(e)
        region = ''
        for i in xrange(e.eventDescriptionCount()):
            ed = e.eventDescription(i)
            if ed.type() == DataModel.REGION_NAME:
                region = ed.text()
                break

        if req._disconnected:
            return False
        line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s\n" % (
               eID, o.time().value().toString(df),
               o.latitude().value(), o.longitude().value(), depth,
               author, contrib, eID, mType, mVal, mAuthor, region)
        utils.writeTS(req, line)
        lineCount += 1
        byteCount += len(line)

    # write response
    Logging.debug("%s: returned %i events (total bytes: %i) " % (
                  ro.service, lineCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Enrich the events in *ep* with related objects (descriptions,
    comments, origins, focal mechanisms, magnitudes, arrivals, picks)
    and stream the result through the exporter *exp*.

    The running object count is checked against the configured maximum
    after every load step; exceeding it aborts with False.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        originIDs = set()
        magIDs = set()
        magIDs.add(e.preferredMagnitudeID())

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
                originIDs.add(oRef.originID())
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                originIDs.add(oRef.originID())
        dbIter.close()
        objCount += e.originReferenceCount()

        # focalMechanism references: either none, preferred only or all
        if ro.fm or ro.allFMs:
            dbIter = dbq.getObjects(
                e, DataModel.FocalMechanismReference.TypeInfo())
            for obj in dbIter:
                fmRef = DataModel.FocalMechanismReference.Cast(obj)
                if fmRef is None:
                    continue
                if ro.allFMs:
                    e.add(fmRef)
                elif fmRef.focalMechanismID() == \
                        e.preferredFocalMechanismID():
                    e.add(fmRef)
            dbIter.close()

        objCount += e.focalMechanismReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # focal mechanisms: process before origins to add derived origin to
        # originID list since it may be missing from origin reference list
        for iFMRef in xrange(e.focalMechanismReferenceCount()):
            if req._disconnected:
                return False
            fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
            obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
            fm = DataModel.FocalMechanism.Cast(obj)
            if fm is None:
                continue

            ep.add(fm)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(fm)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, fm)

            # momentTensors
            objCount += dbq.loadMomentTensors(fm)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            for iMT in xrange(fm.momentTensorCount()):
                mt = fm.momentTensor(iMT)

                originIDs.add(mt.derivedOriginID())
                magIDs.add(mt.momentMagnitudeID())

                if self._hideAuthor:
                    self._removeAuthor(mt)

                if ro.comments:
                    # NOTE(review): the inner loop variable shadows the
                    # outer 'iMT' and always passes the same 'mt' --
                    # comments appear to be loaded repeatedly for one
                    # moment tensor; verify intended behaviour.
                    for iMT in xrange(fm.momentTensorCount()):
                        objCount += self._loadComments(dbq, mt)

                objCount += dbq.loadDataUseds(mt);
                objCount += dbq.loadMomentTensorPhaseSettings(mt);
                if ro.staMTs:
                    objCount += dbq.loadMomentTensorStationContributions(mt);
                    for iStaMT in xrange(mt.momentTensorStationContributionCount()):
                        objCount += dbq.load(
                            mt.momentTensorStationContribution(iStaMT))

                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # find ID of origin containing preferred Magnitude
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                oID = dbq.parentPublicID(m)
                if oID:
                    originIDs.add(oID)

        # origins
        for oID in originIDs:
            if req._disconnected:
                return False
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() in magIDs:
                    o.add(mag)
                    dbIter.close()

                if self._hideAuthor:
                    self._removeAuthor(mag)
            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # NOTE(review): 'if self._removeAuthor:' tests the bound
                # method (always truthy); the pattern used everywhere
                # else is 'if self._hideAuthor:' -- likely a typo, so
                # authors are stripped from arrivals unconditionally.
                if self._removeAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    """Handle an fdsnws-dataselect request: validate options, resolve
    the requested streams against the inventory (honoring access
    restrictions and the sample limit), then start an asynchronous
    _WaveformProducer streaming the miniSEED data.

    Returns an error page string on rejection, otherwise
    server.NOT_DONE_YET (the producer finishes the request).
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # optional global cap on the number of samples per request
    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    # NOTE(review): duplicate of the Application.Instance() lookup above
    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request id: milliseconds since 2015-01-01 (epoch offset)
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        # prefer the client address from X-Forwarded-For when proxied
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    # None: no stream seen yet; True: only denied streams so far;
    # False: at least one authorized stream was added
    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clamp the request window to the channel epoch;
                        # unset epoch bounds fall back to the request
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start
                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        # restricted channels require an authorized user
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                            if tracker:
                                net_class = 't' if net.code()[0] \
                                    in "0123456789XYZ" else 'p'
                                tracker.line_status(start_time, end_time,
                                    net.code(), sta.code(), cha.code(),
                                    loc.code(), True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue
                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg,
                                    ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(),
                                         loc.code(), cha.code(),
                                         start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        # every matched stream was denied
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
        msg = "access denied"
        return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)
    elif forbidden is None:
        # no stream matched the request at all
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
        msg = "no metadata found"
        return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
delSta += 1 net.removeStation(iSta) else: iSta += 1 # remove empty networks if net.stationCount() == 0: delNet += 1 inv.removeNetwork(iNet) else: iNet += 1 if serviceName: serviceName += ": " Logging.debug("%sremoved %i networks, %i stations, %i locations, " "%i streams" % (serviceName, delNet, delSta, delLoc, delCha)) if self._debugFilter: debugLines.sort() Logging.notice("%sfilter decisions based on file %s:\n%s" % (serviceName, fileName, str("\n".join(debugLines)))) return True #--------------------------------------------------------------------------- def _getAuthSessionWrapper(self, inv, msg): if self._useArclinkAccess: access = self._access else: access = None
def parsePOST(self, content):
    """Parse the body of an FDSNWS POST request.

    `content` is an iterable of request lines.  Lines of the form
    ``key=value`` are accepted only for keys listed in ``self.POSTParams``
    and collected into ``self._args``; time or stream parameters in the
    key=value header section raise a ValueError.  Every other non-empty,
    non-comment line is interpreted as a stream line of the form
    ``NET STA LOC CHA STARTTIME [ENDTIME]`` and appended to
    ``self.streams`` as a RequestOptions object.

    Raises:
        ValueError: on any malformed line, or if no stream line is found.
    """
    nLine = 0

    for line in content:
        nLine += 1
        line = line.strip()

        # ignore empty and comment lines
        if len(line) == 0 or line[0] == '#':
            continue

        # collect parameter (non stream lines)
        toks = line.split("=", 1)
        if len(toks) > 1:
            key = toks[0].strip().lower()

            isPOSTParam = False
            for p in self.POSTParams:
                if p == key:
                    # parameters may repeat; values are accumulated in a list
                    if key not in self._args:
                        self._args[key] = []
                    self._args[key].append(toks[1].strip())
                    isPOSTParam = True
                    break

            if isPOSTParam:
                continue

            # time parameters not allowed in POST header
            for p in self.TimeParams:
                if p == key:
                    raise ValueError, "time parameter in line %i not " \
                                      "allowed in POST request" % nLine

            # stream parameters not allowed in POST header
            for p in self.StreamParams:
                if p == key:
                    raise ValueError, "stream parameter in line %i not " \
                                      "allowed in POST request" % nLine

            raise ValueError, "invalid parameter in line %i" % nLine

        else:
            # stream parameters
            toks = line.split()
            nToks = len(toks)
            if nToks != 5 and nToks != 6:
                raise ValueError, "invalid number of stream components " \
                                  "in line %i" % nLine

            ro = RequestOptions()

            # net, sta, loc, cha -- each component may be a comma-separated
            # list of alternatives
            ro.channel = RequestOptions.Channel()
            ro.channel.net = toks[0].split(',')
            ro.channel.sta = toks[1].split(',')
            ro.channel.loc = toks[2].split(',')
            ro.channel.cha = toks[3].split(',')

            msg = "invalid %s value in line %i"
            for net in ro.channel.net:
                if ro.ChannelChars(net):
                    raise ValueError, msg % ('network', nLine)
            for sta in ro.channel.sta:
                if ro.ChannelChars(sta):
                    raise ValueError, msg % ('station', nLine)
            for loc in ro.channel.loc:
                # "--" is the conventional placeholder for an empty
                # location code and is accepted as-is
                if loc != "--" and ro.ChannelChars(loc):
                    raise ValueError, msg % ('location', nLine)
            for cha in ro.channel.cha:
                if ro.ChannelChars(cha):
                    raise ValueError, msg % ('channel', nLine)

            # start/end time: first matching time format wins
            ro.time = RequestOptions.Time()
            ro.time.start = Time()
            for fmt in RequestOptions.TimeFormats:
                if ro.time.start.fromString(toks[4], fmt):
                    break
            logEnd = "-"
            if len(toks) > 5:
                ro.time.end = Time()
                for fmt in RequestOptions.TimeFormats:
                    if ro.time.end.fromString(toks[5], fmt):
                        break
                logEnd = ro.time.end.iso()

            Logging.debug("ro: %s.%s.%s.%s %s %s" % (
                ro.channel.net, ro.channel.sta, ro.channel.loc,
                ro.channel.cha, ro.time.start.iso(), logEnd))
            self.streams.append(ro)

    if len(self.streams) == 0:
        raise ValueError, "at least one stream line is required"
def _processRequest(self, req, ro):
    """Handle an FDSNWS dataselect request (variant with request tracking).

    Validates the request options, opens the record stream, adds every
    matching stream the user is authorized for, and hands the stream to an
    asynchronous _WaveformProducer.

    Returns:
        server.NOT_DONE_YET on success (response produced asynchronously),
        otherwise a rendered error page.
    """

    # only data quality flags 'B' and 'M' are supported
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # NOTE: a redundant second 'app = Application.Instance()' assignment
    # was removed here; 'app' is already bound above
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request id: milliseconds since 2015-01-01 epoch offset
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # add request streams, iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # skip restricted streams the user may not access
                        if utils.isRestricted(cha) and (self.__user is None or \
                           not self.__access.authorize(self.__user,
                                   net.code(), sta.code(), loc.code(),
                                   cha.code(), s.time.start, s.time.end)):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            # BUGFIX: was 'except ValueException', which is
                            # not a defined name in this module and would
                            # raise NameError; the sibling implementations
                            # in this file catch ValueError here
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(
                                s.time.start, s.time.end, net.code(),
                                sta.code(), cha.code(), loc.code(), False,
                                "", True, [], "fdsnws", "OK", 0, "")

    # build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # the request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    """Handle an FDSNWS dataselect request (variant without request
    tracking).

    Validates the request options, opens the record stream, adds all
    matching streams of unrestricted networks/stations and hands the
    stream to an asynchronous _WaveformProducer.

    Returns:
        server.NOT_DONE_YET on success (response produced asynchronously),
        otherwise a rendered error page.
    """

    # only data quality flags 'B' and 'M' are supported
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # add request streams, iterate over inventory networks; anonymous
    # users only get access to unrestricted networks and stations
    for s in ro.streams:
        for net in self._networkIter(s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            # BUGFIX: was 'except ValueException', which is
                            # not a defined name in this module and would
                            # raise NameError; the sibling implementations
                            # in this file catch ValueError here
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # the request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    """Handle an FDSNWS dataselect request (tracked, buffered-stream
    variant).

    Validates the request options, builds a _MyRecordStream, adds every
    matching stream the user is authorized for (time windows clipped to
    the channel epoch), and drives an asynchronous _WaveformProducer.

    Returns:
        server.NOT_DONE_YET on success (response produced asynchronously),
        otherwise a rendered error page.
    """

    # only data quality flags 'B' and 'M' are supported
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # NOTE(review): 'app' is re-assigned here although it is already bound
    # above; the second assignment is redundant but harmless
    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request id: milliseconds since 2015-01-01 epoch offset
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        # prefer the originating client IP if the request was proxied
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip request window to the channel epoch; the
                        # epoch end (and sometimes start) may be unset,
                        # in which case the requested bound is kept
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start

                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        # skip restricted streams the user may not access
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def run(self): modeStr = None if self._evaluationMode is not None: modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode) whitelistStr = "<None>" if self._eventTypeWhitelist is not None: whitelistStr = ", ".join(self._eventTypeWhitelist) blacklistStr = "<None>" if self._eventTypeBlacklist is not None: blacklistStr = ", ".join(self._eventTypeBlacklist) Logging.debug( "\n" "configuration read:\n" " serve\n" " dataselect : %s\n" " event : %s\n" " station : %s\n" " listenAddress : %s\n" " port : %i\n" " connections : %i\n" " htpasswd : %s\n" " accessLog : %s\n" " queryObjects : %i\n" " realtimeGap : %s\n" " samples (M) : %s\n" " allowRestricted : %s\n" " hideAuthor : %s\n" " evaluationMode : %s\n" " eventType\n" " whitelist : %s\n" " blacklist : %s\n" % ( self._serveDataSelect, self._serveEvent, self._serveStation, self._listenAddress, self._port, self._connections, self._htpasswd, self._accessLogFile, self._queryObjects, self._realtimeGap, self._samplesM, self._allowRestricted, self._hideAuthor, modeStr, whitelistStr, blacklistStr, ) ) if not self._serveDataSelect and not self._serveEvent and not self._serveStation: Logging.error("all services disabled through configuration") return False # access logger if requested if self._accessLogFile: self._accessLog = Log(self._accessLogFile) # load inventory needed by DataSelect and Station service if self._serveDataSelect or self._serveStation: self._loadInventory() DataModel.PublicObject.SetRegistrationEnabled(False) shareDir = os.path.join(Environment.Instance().shareDir(), "fdsnws") # Overwrite/set mime type of *.wadl and *.xml documents. Instead of # using the official types defined in /etc/mime.types 'application/xml' # is used as enforced by the FDSNWS spec. static.File.contentTypes[".wadl"] = "application/xml" static.File.contentTypes[".xml"] = "application/xml" # create resource tree /fdsnws/... 
root = ListingResource() fileName = os.path.join(shareDir, "favicon.ico") fileRes = static.File(fileName, "image/x-icon") fileRes.childNotFound = NoResource() fileRes.isLeaf = True root.putChild("favicon.ico", fileRes) prefix = ListingResource() root.putChild("fdsnws", prefix) # right now service version is shared by all services serviceVersion = ServiceVersion() # dataselect if self._serveDataSelect: dataselect = ListingResource() prefix.putChild("dataselect", dataselect) dataselect1 = DirectoryResource(os.path.join(shareDir, "dataselect.html")) dataselect.putChild("1", dataselect1) dataselect1.putChild("query", FDSNDataSelect()) msg = "authorization for restricted time series data required" authSession = self._getAuthSessionWrapper(FDSNDataSelectRealm(), msg) dataselect1.putChild("queryauth", authSession) dataselect1.putChild("version", serviceVersion) fileRes = static.File(os.path.join(shareDir, "dataselect.wadl")) fileRes.childNotFound = NoResource() dataselect1.putChild("application.wadl", fileRes) # event if self._serveEvent: event = ListingResource() prefix.putChild("event", event) event1 = DirectoryResource(os.path.join(shareDir, "event.html")) event.putChild("1", event1) event1.putChild( "query", FDSNEvent(self._hideAuthor, self._evaluationMode, self._eventTypeWhitelist, self._eventTypeBlacklist), ) fileRes = static.File(os.path.join(shareDir, "catalogs.xml")) fileRes.childNotFound = NoResource() event1.putChild("catalogs", fileRes) fileRes = static.File(os.path.join(shareDir, "contributors.xml")) fileRes.childNotFound = NoResource() event1.putChild("contributors", fileRes) event1.putChild("version", serviceVersion) fileRes = static.File(os.path.join(shareDir, "event.wadl")) fileRes.childNotFound = NoResource() event1.putChild("application.wadl", fileRes) # station if self._serveStation: station = ListingResource() prefix.putChild("station", station) station1 = DirectoryResource(os.path.join(shareDir, "station.html")) station.putChild("1", station1) 
station1.putChild("query", FDSNStation(self._inv, self._allowRestricted, self._queryObjects)) station1.putChild("version", serviceVersion) fileRes = static.File(os.path.join(shareDir, "station.wadl")) fileRes.childNotFound = NoResource() station1.putChild("application.wadl", fileRes) retn = False try: # start listen for incoming request reactor.listenTCP(self._port, Site(root), self._connections, self._listenAddress) # start processing Logging.info("start listening") log.addObserver(logSC3) reactor.run() retn = True except Exception, e: Logging.error(str(e))
def sh2proc(self, file):
    """Convert a Seismic Handler (SH) formatted event/phase text file into
    SeisComP3 EventParameters.

    The input is read line by line; each line is split into key and value
    at the first colon.  Phase blocks are terminated by the literal line
    '--- end of phase ---', at which point the accumulated pick, arrival,
    amplitude and station magnitude are attached to the origin / event
    parameters.  Origin-level keys (latitude, depth, origin time, ...)
    update the single origin object directly.

    Returns None on a fatal parse error (non-ValueError exception).

    NOTE(review): there is no code after the read loop -- the assembled
    EventParameters/origin/magnitude objects are never returned, so every
    call ends in an implicit None.  This looks like the function body was
    truncated; confirm against the upstream sh2proc source.
    """
    ep = DataModel.EventParameters()
    magnitude = DataModel.Magnitude.Create()
    origin = DataModel.Origin.Create()
    origin.setCreationInfo(DataModel.CreationInfo())
    origin.creationInfo().setCreationTime(Core.Time.GMT())

    # origin-level accumulators, created lazily when first needed
    originQuality = None
    originCE = None
    latFound = False
    lonFound = False
    depthError = None
    originComments = {}

    # phase variables, reset after 'end of phase'
    pick = None
    stationMag = None
    staCode = None
    compCode = None

    # read file line by line, split key and value at colon
    iLine = 0
    for line in file:
        iLine += 1
        a = line.split(':', 1)
        key = a[0].strip()
        keyLower = key.lower()
        value = None

        # empty line
        if len(keyLower) == 0:
            continue

        # end of phase
        elif keyLower == '--- end of phase ---':
            if pick is None:
                Logging.warning('Line %i: found empty phase block' % iLine)
                continue

            if staCode is None or compCode is None:
                Logging.warning('Line %i: end of phase, stream code ' \
                                'incomplete' % iLine)
                continue

            if not self.streams.has_key(staCode):
                Logging.warning('Line %i: end of phase, station code %s ' \
                                'not found in inventory' % (iLine, staCode))
                continue

            if not self.streams[staCode].has_key(compCode):
                Logging.warning('Line %i: end of phase, component %s of ' \
                                'station %s not found in inventory' % (
                                    iLine, compCode, staCode))
                continue

            streamID = self.streams[staCode][compCode]

            pick.setWaveformID(streamID)
            ep.add(pick)

            # 'arrival' and 'amplitude' are created together with 'pick'
            # below, so they are guaranteed to exist here
            arrival.setPickID(pick.publicID())
            origin.add(arrival)

            amplitude.setPickID(pick.publicID())
            ep.add(amplitude)

            if stationMag is not None:
                stationMag.setWaveformID(streamID)
                origin.add(stationMag)

                stationMagContrib = DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMag.publicID())
                magnitude.add(stationMagContrib)

            # reset phase state for the next block
            pick = None
            staCode = None
            compCode = None
            stationMag = None
            continue

        # empty key
        elif len(a) == 1:
            Logging.warning('Line %i: key without value' % iLine)
            continue

        value = a[1].strip()

        # first key/value of a new phase block: create the phase objects
        if pick is None:
            pick = DataModel.Pick.Create()
            arrival = DataModel.Arrival()
            amplitude = DataModel.Amplitude.Create()

        try:
            ##############################################################
            # station parameters

            # station code
            if keyLower == 'station code':
                staCode = value

            # pick time
            elif keyLower == 'onset time':
                pick.setTime(DataModel.TimeQuantity(self.parseTime(value)))

            # pick onset type
            elif keyLower == 'onset type':
                found = False
                for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                              DataModel.QUESTIONABLE]:
                    if value == DataModel.EPickOnsetNames_name(onset):
                        pick.setOnset(onset)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported onset value')

            # phase code
            elif keyLower == 'phase name':
                phase = DataModel.Phase()
                phase.setCode(value)
                pick.setPhaseHint(phase)
                arrival.setPhase(phase)

            # event type, added as origin comment later on
            elif keyLower == 'event type':
                originComments[key] = value

            # filter ID
            elif keyLower == 'applied filter':
                pick.setFilterID(value)

            # channel code, prepended by configured Channel prefix if only
            # one character is found
            elif keyLower == 'component':
                compCode = value

            # pick evaluation mode
            elif keyLower == 'pick type':
                found = False
                for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                    if value == DataModel.EEvaluationModeNames_name(mode):
                        pick.setEvaluationMode(mode)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported evaluation mode value')

            # arrival weight
            elif keyLower == 'weight':
                arrival.setWeight(float(value))

            # arrival azimuth
            elif keyLower == 'theo. azimuth (deg)':
                arrival.setAzimuth(float(value))

            # arrival backazimuth
            elif keyLower == 'theo. backazimuth (deg)':
                pick.setBackazimuth(DataModel.RealQuantity(float(value)))

            # arrival distance
            elif keyLower == 'distance (deg)':
                arrival.setDistance(float(value))

            # ignored
            elif keyLower == 'distance (km)':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # arrival time residual
            elif keyLower == 'residual time':
                arrival.setTimeResidual(float(value))

            # ignored
            elif keyLower == 'quality number':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # station magnitude value and type
            elif keyLower.startswith('magnitude '):
                stationMag = DataModel.StationMagnitude.Create()
                stationMag.setAmplitudeID(amplitude.publicID())
                stationMag.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                magType = self.parseMagType(key[10:])
                if len(magType) > 0:
                    stationMag.setType(magType)
                    amplitude.setType(magType)

            ###############################################################
            # origin parameters

            # event ID, added as origin comment later on
            elif keyLower == 'event id':
                originComments[key] = value

            # magnitude value and type
            elif keyLower.startswith('mean magnitude '):
                magnitude.setMagnitude(DataModel.RealQuantity(float(value)))
                magType = self.parseMagType(key[15:])
                if len(magType) > 0:
                    magnitude.setType(magType)

            # latitude
            elif keyLower == 'latitude':
                origin.latitude().setValue(float(value))
                latFound = True
            elif keyLower == 'error in latitude (km)':
                origin.latitude().setUncertainty(float(value))

            # longitude
            elif keyLower == 'longitude':
                origin.longitude().setValue(float(value))
                lonFound = True
            elif keyLower == 'error in longitude (km)':
                origin.longitude().setUncertainty(float(value))

            # depth
            elif keyLower == 'depth (km)':
                origin.setDepth(DataModel.RealQuantity(float(value)))
                # a depth error seen earlier is applied as soon as the
                # depth itself becomes available
                if depthError is not None:
                    origin.depth().setUncertainty(depthError)
            elif keyLower == 'depth type':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))
            elif keyLower == 'error in depth (km)':
                depthError = float(value)
                # setting the uncertainty fails if no depth was set yet;
                # it is then applied later (see 'depth (km)' above)
                try:
                    origin.depth().setUncertainty(depthError)
                except Core.ValueException:
                    pass

            # time
            elif keyLower == 'origin time':
                origin.time().setValue(self.parseTime(value))
            elif keyLower == 'error in origin time':
                origin.time().setUncertainty(float(value))

            # region table, added as origin comment later on
            elif keyLower == 'region table':
                originComments[key] = value

            # region table, added as origin comment later on
            elif keyLower == 'region id':
                originComments[key] = value

            # source region, added as origin comment later on
            elif keyLower == 'source region':
                originComments[key] = value

            # used station count
            elif keyLower == 'no. of stations used':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setUsedStationCount(int(value))

            # ignored
            elif keyLower == 'reference location name':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # confidence ellipsoid major axis
            elif keyLower == 'error ellipse major':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMajorAxisLength(float(value))

            # confidence ellipsoid minor axis
            elif keyLower == 'error ellipse minor':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMinorAxisLength(float(value))

            # confidence ellipsoid rotation
            elif keyLower == 'error ellipse strike':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setMajorAxisRotation(float(value))

            # azimuthal gap
            elif keyLower == 'max azimuthal gap (deg)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setAzimuthalGap(float(value))

            # creation info author
            elif keyLower == 'author':
                origin.creationInfo().setAuthor(value)

            # creation info agency
            elif keyLower == 'agency':
                origin.creationInfo().setAgencyID(value)

            # earth model id
            elif keyLower == 'velocity model':
                origin.setEarthModelID(value)

            # standard error
            elif keyLower == 'rms of residuals (sec)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setStandardError(float(value))

            # ignored
            elif keyLower == 'phase flags':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # ignored
            elif keyLower == 'location input params':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # unknown key
            else:
                Logging.warning('Line %i: ignoring unknown parameter: %s' \
                                % (iLine, key))

        except ValueError, ve:
            Logging.warning('Line %i: can not parse %s value' % (
                iLine, key))
        except Exception:
            Logging.error('Line %i: %s' % (
                iLine, str(traceback.format_exc())))
            return None
def _site(self):
    """Build and return the Twisted Site serving the FDSNWS resource tree.

    Logs the effective configuration, loads and filters the inventory for
    the Station/DataSelect services, initializes access control, and wires
    up the resource hierarchy under /fdsnws/ for all enabled services
    (dataselect, event, station, ext/availability).

    Returns:
        twisted Site instance, or None if all services are disabled or
        inventory filtering failed.
    """
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)
    stationFilterStr = "<None>"
    if self._stationFilter is not None:
        stationFilterStr = self._stationFilter
    dataSelectFilterStr = "<None>"
    if self._dataSelectFilter is not None:
        dataSelectFilterStr = self._dataSelectFilter

    Logging.debug("\n"
                  "configuration read:\n"
                  "  serve\n"
                  "    dataselect    : %s\n"
                  "    event         : %s\n"
                  "    station       : %s\n"
                  "    availability  : %s\n"
                  "  listenAddress   : %s\n"
                  "  port            : %i\n"
                  "  connections     : %i\n"
                  "  htpasswd        : %s\n"
                  "  accessLog       : %s\n"
                  "  queryObjects    : %i\n"
                  "  realtimeGap     : %s\n"
                  "  samples (M)     : %s\n"
                  "  recordBulkSize  : %i\n"
                  "  allowRestricted : %s\n"
                  "  useArclinkAccess: %s\n"
                  "  hideAuthor      : %s\n"
                  "  evaluationMode  : %s\n"
                  "  data availability\n"
                  "    enabled       : %s\n"
                  "    cache duration: %i\n"
                  "    repo name     : %s\n"
                  "    dcc name      : %s\n"
                  "  eventType\n"
                  "    whitelist     : %s\n"
                  "    blacklist     : %s\n"
                  "  inventory filter\n"
                  "    station       : %s\n"
                  "    dataSelect    : %s\n"
                  "    debug enabled : %s\n"
                  "  trackdb\n"
                  "    enabled       : %s\n"
                  "    defaultUser   : %s\n"
                  "  auth\n"
                  "    enabled       : %s\n"
                  "    gnupgHome     : %s\n"
                  "  requestLog      : %s\n" % (
                      self._serveDataSelect, self._serveEvent,
                      self._serveStation, self._serveAvailability,
                      self._listenAddress, self._port, self._connections,
                      self._htpasswd, self._accessLogFile,
                      self._queryObjects, self._realtimeGap,
                      self._samplesM, self._recordBulkSize,
                      self._allowRestricted, self._useArclinkAccess,
                      self._hideAuthor, modeStr, self._daEnabled,
                      self._daCacheDuration, self._daRepositoryName,
                      self._daDCCName, whitelistStr, blacklistStr,
                      stationFilterStr, dataSelectFilterStr,
                      self._debugFilter, self._trackdbEnabled,
                      self._trackdbDefaultUser, self._authEnabled,
                      self._authGnupgHome, self._requestLogFile))

    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return None

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # request logger if requested
    if self._requestLogFile:
        # import here, so we don't depend on GeoIP if request log is not needed
        from seiscomp3.fdsnws.reqlog import RequestLog
        self._requestLog = RequestLog(self._requestLogFile)

    # load inventory needed by DataSelect and Station service
    stationInv = dataSelectInv = None
    if self._serveDataSelect or self._serveStation:
        retn = False
        stationInv = dataSelectInv = Inventory.Instance().inventory()
        Logging.info("inventory loaded")

        if self._serveDataSelect and self._serveStation:
            # clone inventory if station and dataSelect filter are distinct
            # else share inventory between both services
            if self._stationFilter != self._dataSelectFilter:
                dataSelectInv = self._cloneInventory(stationInv)
                retn = self._filterInventory(
                    stationInv, self._stationFilter, "station") and \
                    self._filterInventory(dataSelectInv,
                                          self._dataSelectFilter,
                                          "dataSelect")
            else:
                retn = self._filterInventory(stationInv,
                                             self._stationFilter)
        elif self._serveStation:
            retn = self._filterInventory(stationInv, self._stationFilter)
        else:
            retn = self._filterInventory(dataSelectInv,
                                         self._dataSelectFilter)

        if not retn:
            return None

    self._access = Access()
    if self._serveDataSelect and self._useArclinkAccess:
        self._access.initFromSC3Routing(self.query().loadRouting())

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()

    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # dataselect
    if self._serveDataSelect:
        dataselect = ListingResource(DataSelectVersion)
        prefix.putChild('dataselect', dataselect)
        lstFile = os.path.join(shareDir, 'dataselect.html')
        dataselect1 = DirectoryResource(lstFile, DataSelectVersion)
        dataselect.putChild('1', dataselect1)
        dataselect1.putChild('query', FDSNDataSelect(
            dataSelectInv, self._recordBulkSize))
        msg = 'authorization for restricted time series data required'
        authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version', ServiceVersion(DataSelectVersion))
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource(DataSelectVersion)
        dataselect1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'dataselect-builder.html'))
        fileRes.childNotFound = NoResource(DataSelectVersion)
        dataselect1.putChild('builder', fileRes)

        if self._authEnabled:
            dataselect1.putChild('auth', AuthResource(
                DataSelectVersion, self._authGnupgHome, self._userdb))

    # event
    if self._serveEvent:
        event = ListingResource(EventVersion)
        prefix.putChild('event', event)
        lstFile = os.path.join(shareDir, 'event.html')
        event1 = DirectoryResource(lstFile, EventVersion)
        event.putChild('1', event1)
        event1.putChild('query', FDSNEvent(self._hideAuthor,
                                           self._evaluationMode,
                                           self._eventTypeWhitelist,
                                           self._eventTypeBlacklist,
                                           self._eventFormats))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('contributors', fileRes)
        event1.putChild('version', ServiceVersion(EventVersion))
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('builder', fileRes)

    # station
    if self._serveStation:
        station = ListingResource(StationVersion)
        prefix.putChild('station', station)
        lstFile = os.path.join(shareDir, 'station.html')
        station1 = DirectoryResource(lstFile, StationVersion)
        station.putChild('1', station1)
        station1.putChild('query', FDSNStation(stationInv,
                                               self._allowRestricted,
                                               self._queryObjects,
                                               self._daEnabled))
        station1.putChild('version', ServiceVersion(StationVersion))

        # wadl, optionally filtered
        filterList = [] if self._daEnabled else ['name="matchtimeseries"']
        try:
            fileRes = WADLFilter(os.path.join(shareDir, 'station.wadl'),
                                 filterList)
        except:
            fileRes = NoResource(StationVersion)
        station1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'station-builder.html'))
        fileRes.childNotFound = NoResource(StationVersion)
        station1.putChild('builder', fileRes)

    # availability
    if self._serveAvailability:
        # create a set of waveformIDs which represent open channels
        if self._serveDataSelect:
            openStreams = set()
            for iNet in xrange(dataSelectInv.networkCount()):
                net = dataSelectInv.network(iNet)
                if utils.isRestricted(net):
                    continue
                for iSta in xrange(net.stationCount()):
                    sta = net.station(iSta)
                    if utils.isRestricted(sta):
                        continue
                    for iLoc in xrange(sta.sensorLocationCount()):
                        loc = sta.sensorLocation(iLoc)
                        for iCha in xrange(loc.streamCount()):
                            cha = loc.stream(iCha)
                            if utils.isRestricted(cha):
                                continue
                            openStreams.add("{0}.{1}.{2}.{3}".format(
                                net.code(), sta.code(), loc.code(),
                                cha.code()))
            self._openStreams = openStreams
        else:
            self._openStreams = None

        ext = ListingResource()
        prefix.putChild('ext', ext)
        availability = ListingResource(AvailabilityVersion)
        ext.putChild('availability', availability)
        lstFile = os.path.join(shareDir, 'availability.html')
        availability1 = DirectoryResource(lstFile, AvailabilityVersion)
        availability.putChild('1', availability1)
        availability1.putChild('extent', AvailabilityExtent())
        availability1.putChild('query', AvailabilityQuery())
        availability1.putChild('version',
                               ServiceVersion(AvailabilityVersion))
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('availability.wadl', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'availability-extent-builder.html'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('builder-extent', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'availability-builder.html'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('builder', fileRes)

    # static files
    fileRes = static.File(os.path.join(shareDir, 'js'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('js', fileRes)

    fileRes = static.File(os.path.join(shareDir, 'css'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('css', fileRes)

    return Site(root)
    def _processRequest(self, req, ro, dbq, exp):
        """Process one FDSN event request.

        Queries events (by ID or via self._findEvents), loads the related
        origins, magnitudes, arrivals and picks from the database and streams
        the result through the exporter `exp` to the client.

        req -- twisted HTTP request object
        ro  -- parsed request options (filters, output format, flags)
        dbq -- SeisComP database query object
        exp -- exporter used to serialize the EventParameters tree

        Returns True on success, False if the request was aborted (client
        disconnect, no matching events, object count limit exceeded or
        exporter failure).
        """
        if req._disconnected:
            return False

        # Registration disabled: objects are only written out, never looked
        # up again by publicID, so the global registry is unnecessary.
        DataModel.PublicObject.SetRegistrationEnabled(False)
        maxObj = Application.Instance()._queryObjects

        # query event(s)
        ep = DataModel.EventParameters()
        if ro.eventIDs:
            for eID in ro.eventIDs:
                event = dbq.getEventByPublicID(eID)
                event = DataModel.Event.Cast(event)
                if event:
                    ep.add(event)
        else:
            self._findEvents(ep, ro, dbq)

        if ep.eventCount() == 0:
            msg = "No matching events found"
            utils.writeTS(req,
                          HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
            return False

        # objCount tracks every object fetched so far; checked repeatedly
        # against the configured per-request limit.
        objCount = ep.eventCount()
        Logging.debug("events found: %i" % objCount)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        pickIDs = set()

        # add related information
        for iEvent in xrange(ep.eventCount()):
            e = ep.event(iEvent)
            eID = e.publicID()

            # eventDescriptions and comments
            objCount += dbq.loadEventDescriptions(e)
            if ro.comments:
                objCount += dbq.loadComments(e)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # origin references: either all or preferred only
            dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
            for obj in dbIter:
                oRef = DataModel.OriginReference.Cast(obj)
                if oRef is None:
                    continue
                if ro.allOrigins:
                    e.add(oRef)
                elif oRef.originID() == e.preferredOriginID():
                    e.add(oRef)
            dbIter.close()
            objCount += e.originReferenceCount()

            # TODO: load FocalMechanismReferences???
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # origins
            for iORef in xrange(e.originReferenceCount()):
                oID = e.originReference(iORef).originID()
                obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
                o = DataModel.Origin.Cast(obj)
                if o is None:
                    continue
                ep.add(o)
                objCount += 1

                # comments
                if ro.comments:
                    objCount += dbq.loadComments(o)
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # magnitudes
                dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
                for obj in dbIter:
                    mag = DataModel.Magnitude.Cast(obj)
                    if mag is None:
                        continue
                    if ro.allMags:
                        o.add(mag)
                    elif mag.publicID() == e.preferredMagnitudeID():
                        o.add(mag)
                dbIter.close()
                objCount += o.magnitudeCount()
                if ro.comments:
                    for iMag in xrange(o.magnitudeCount()):
                        objCount += dbq.loadComments(o.magnitude(iMag))
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # arrivals
                if ro.arrivals:
                    objCount += dbq.loadArrivals(o)

                    # collect pick IDs if requested
                    if ro.picks:
                        for iArrival in xrange(o.arrivalCount()):
                            pickIDs.add(o.arrival(iArrival).pickID())

                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # picks (deduplicated via the pickIDs set, loaded once for all events)
        if pickIDs:
            objCount += len(pickIDs)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False
            for pickID in pickIDs:
                obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
                pick = DataModel.Pick.Cast(obj)
                if pick is not None:
                    ep.add(pick)

        # serialize result; content type depends on requested output format
        if ro.output == "csv":
            req.setHeader("Content-Type", "text/plain")
        else:
            req.setHeader("Content-Type", "application/xml")
        sink = utils.Sink(req)
        if not exp.write(sink, ep):
            return False
        Logging.notice("%s: returned %i events and %i origins (total " \
                       "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                       ep.originCount(), objCount, sink.written))
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri)) request.setHeader('Server', "SeisComP3-FDSNWS/%s" % VERSION) return server.Site.getResourceFor(self, request)
delSta += 1 net.removeStation(iSta) else: iSta += 1 # remove empty networks if net.stationCount() == 0: delNet += 1 inv.removeNetwork(iNet) else: iNet += 1 if serviceName: serviceName += ": " Logging.debug("%sremoved %i networks, %i stations, %i locations, " "%i streams" % (serviceName, delNet, delSta, delLoc, delCha)) if self._debugFilter: debugLines.sort() Logging.notice("%sfilter decisions based on file %s:\n%s" % ( serviceName, fileName, str("\n".join(debugLines)))) return True #--------------------------------------------------------------------------- def _getAuthSessionWrapper(self, inv, msg): if self._useArclinkAccess: access = self._access else:
    def loadStreams(self):
        """Populate self.streams: station code -> {component letter: WaveformStreamID}.

        Primary path: if a configuration module with station bindings is
        available, resolve each station's preferred location/stream from its
        'detecLocid'/'detecStream' parameters and register the three
        components reported by DataModel.getThreeComponents().

        Fallback path: without a configuration module, walk the full
        inventory and take the first stream per station whose epoch covers
        the current time; later streams with differing network/location/
        band+instrument codes are rejected as ambiguous.
        """
        now = Core.Time.GMT()
        inv = Client.Inventory.Instance()

        self.streams = {}

        # try to load streams by detecLocid and detecStream
        mod = self.configModule()
        if mod is not None and mod.configStationCount() > 0:
            Logging.info('loading streams using detecLocid and detecStream')
            for i in range(mod.configStationCount()):
                cfg = mod.configStation(i)
                net = cfg.networkCode()
                sta = cfg.stationCode()
                # only one binding per station code is supported
                if sta in self.streams:
                    Logging.warning('ambiguous stream id found for station '
                                    '%s.%s' % (net, sta))
                    continue

                setup = DataModel.findSetup(cfg, self.name(), True)
                if not setup:
                    Logging.warning('could not find station setup for %s.%s' % (
                        net, sta))
                    continue

                params = DataModel.ParameterSet.Find(setup.parameterSetID())
                if not params:
                    Logging.warning('could not find station parameters for '
                                    '%s.%s' % (net, sta))
                    continue

                detecLocid = ''
                detecStream = None

                for j in range(params.parameterCount()):
                    param = params.parameter(j)
                    if param.name() == 'detecStream':
                        detecStream = param.value()
                    elif param.name() == 'detecLocid':
                        detecLocid = param.value()

                if detecStream is None:
                    Logging.warning('could not find detecStream for %s.%s' % (
                        net, sta))
                    continue

                loc = inv.getSensorLocation(net, sta, detecLocid, now)
                if loc is None:
                    Logging.warning('could not find preferred location for '
                                    '%s.%s' % (net, sta))
                    continue

                # resolve vertical and both horizontals for the band+instrument
                # code prefix (first two characters of detecStream)
                components = {}
                tc = DataModel.ThreeComponents()
                DataModel.getThreeComponents(tc, loc, detecStream[:2], now)
                if tc.vertical():
                    cha = tc.vertical()
                    wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                       cha.code(), '')
                    # keyed by last character of the channel code (orientation)
                    components[cha.code()[-1]] = wfsID
                    Logging.debug('add stream %s (vertical)' % wfs2Str(wfsID))
                if tc.firstHorizontal():
                    cha = tc.firstHorizontal()
                    wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                       cha.code(), '')
                    components[cha.code()[-1]] = wfsID
                    Logging.debug('add stream %s (first horizontal)' %
                                  wfs2Str(wfsID))
                if tc.secondHorizontal():
                    cha = tc.secondHorizontal()
                    wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                       cha.code(), '')
                    components[cha.code()[-1]] = wfsID
                    Logging.debug(
                        'add stream %s (second horizontal)' % wfs2Str(wfsID))
                if len(components) > 0:
                    self.streams[sta] = components

            return

        # fallback loading streams from inventory
        Logging.warning('no configuration module available, loading streams '
                        'from inventory and selecting first available stream '
                        'matching epoch')
        for iNet in range(inv.inventory().networkCount()):
            net = inv.inventory().network(iNet)
            Logging.debug('network %s: loaded %i stations' % (
                net.code(), net.stationCount()))
            for iSta in range(net.stationCount()):
                sta = net.station(iSta)
                # skip stations whose epoch does not cover 'now'; missing
                # start time raises and skips, missing end time means open
                try:
                    start = sta.start()
                    if not start <= now:
                        continue
                except:
                    continue

                try:
                    end = sta.end()
                    if not now <= end:
                        continue
                except:
                    pass

                for iLoc in range(sta.sensorLocationCount()):
                    loc = sta.sensorLocation(iLoc)
                    for iCha in range(loc.streamCount()):
                        cha = loc.stream(iCha)

                        wfsID = DataModel.WaveformStreamID(net.code(),
                            sta.code(), loc.code(), cha.code(), '')
                        # component letter = third character of channel code
                        comp = cha.code()[2]
                        if sta.code() not in self.streams:
                            components = {}
                            components[comp] = wfsID
                            self.streams[sta.code()] = components
                        else:
                            # Seismic Handler does not support network,
                            # location and channel code: make sure network and
                            # location codes match first item in station
                            # specific steam list
                            oldWfsID = list(self.streams[sta.code()].values())[0]
                            if net.code() != oldWfsID.networkCode() or \
                                    loc.code() != oldWfsID.locationCode() or \
                                    cha.code()[:2] != oldWfsID.channelCode()[:2]:
                                Logging.warning('ambiguous stream id found '
                                                'for station %s, ignoring %s'
                                                % (sta.code(), wfs2Str(wfsID)))
                                continue

                            self.streams[sta.code()][comp] = wfsID

                        Logging.debug('add stream %s' % wfs2Str(wfsID))
    def run(self):
        """Main application entry point.

        Dumps the effective configuration, loads and filters the inventory
        for the DataSelect/Station services, builds the twisted resource
        tree under /fdsnws/ and runs the reactor until shutdown.

        Returns False on configuration/inventory errors; on reactor errors
        the exception is logged. NOTE(review): no explicit success return is
        visible in this view — presumably the method continues past this
        chunk; confirm against the full file.
        """
        # build human readable strings for optional settings ("<None>" when
        # the option is unset)
        modeStr = None
        if self._evaluationMode is not None:
            modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
        whitelistStr = "<None>"
        if self._eventTypeWhitelist is not None:
            whitelistStr = ", ".join(self._eventTypeWhitelist)
        blacklistStr = "<None>"
        if self._eventTypeBlacklist is not None:
            blacklistStr = ", ".join(self._eventTypeBlacklist)
        stationFilterStr = "<None>"
        if self._stationFilter is not None:
            stationFilterStr = self._stationFilter
        dataSelectFilterStr = "<None>"
        if self._dataSelectFilter is not None:
            dataSelectFilterStr = self._dataSelectFilter
        Logging.debug("\n" \
                      "configuration read:\n" \
                      "  serve\n" \
                      "    dataselect    : %s\n" \
                      "    event         : %s\n" \
                      "    station       : %s\n" \
                      "  listenAddress   : %s\n" \
                      "  port            : %i\n" \
                      "  connections     : %i\n" \
                      "  htpasswd        : %s\n" \
                      "  accessLog       : %s\n" \
                      "  queryObjects    : %i\n" \
                      "  realtimeGap     : %s\n" \
                      "  samples (M)     : %s\n" \
                      "  allowRestricted : %s\n" \
                      "  useArclinkAccess: %s\n" \
                      "  hideAuthor      : %s\n" \
                      "  evaluationMode  : %s\n" \
                      "  eventType\n" \
                      "    whitelist     : %s\n" \
                      "    blacklist     : %s\n" \
                      "  inventory filter\n" \
                      "    station       : %s\n" \
                      "    dataSelect    : %s\n" \
                      "    debug enabled : %s\n" \
                      "  trackdb\n" \
                      "    enabled       : %s\n" \
                      "    defaultUser   : %s\n" \
                      "  auth\n" \
                      "    enabled       : %s\n" \
                      "    gnupgHome     : %s\n" % (
                          self._serveDataSelect, self._serveEvent,
                          self._serveStation, self._listenAddress, self._port,
                          self._connections, self._htpasswd,
                          self._accessLogFile, self._queryObjects,
                          self._realtimeGap, self._samplesM,
                          self._allowRestricted, self._useArclinkAccess,
                          self._hideAuthor, modeStr, whitelistStr,
                          blacklistStr, stationFilterStr, dataSelectFilterStr,
                          self._debugFilter, self._trackdbEnabled,
                          self._trackdbDefaultUser, self._authEnabled,
                          self._authGnupgHome))

        if not self._serveDataSelect and not self._serveEvent and \
                not self._serveStation:
            Logging.error("all services disabled through configuration")
            return False

        # access logger if requested
        if self._accessLogFile:
            self._accessLog = Log(self._accessLogFile)

        # load inventory needed by DataSelect and Station service
        stationInv = dataSelectInv = None
        if self._serveDataSelect or self._serveStation:
            retn = False
            stationInv = dataSelectInv = Inventory.Instance().inventory()
            Logging.info("inventory loaded")

            if self._serveDataSelect and self._serveStation:
                # clone inventory if station and dataSelect filter are distinct
                # else share inventory between both services
                if self._stationFilter != self._dataSelectFilter:
                    dataSelectInv = self._cloneInventory(stationInv)
                    retn = self._filterInventory(
                               stationInv, self._stationFilter, "station") and \
                           self._filterInventory(
                               dataSelectInv, self._dataSelectFilter,
                               "dataSelect")
                else:
                    retn = self._filterInventory(stationInv,
                                                 self._stationFilter)
            elif self._serveStation:
                retn = self._filterInventory(stationInv, self._stationFilter)
            else:
                retn = self._filterInventory(dataSelectInv,
                                             self._dataSelectFilter)

            if not retn:
                return False

        if self._serveDataSelect:
            self._access.initFromSC3Routing(self.query().loadRouting())

        DataModel.PublicObject.SetRegistrationEnabled(False)

        shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

        # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
        # using the official types defined in /etc/mime.types 'application/xml'
        # is used as enforced by the FDSNWS spec.
        static.File.contentTypes['.wadl'] = 'application/xml'
        static.File.contentTypes['.xml'] = 'application/xml'

        # create resource tree /fdsnws/...
        root = ListingResource()

        fileName = os.path.join(shareDir, 'favicon.ico')
        fileRes = static.File(fileName, 'image/x-icon')
        fileRes.childNotFound = NoResource()
        fileRes.isLeaf = True
        root.putChild('favicon.ico', fileRes)

        prefix = ListingResource()
        root.putChild('fdsnws', prefix)

        # right now service version is shared by all services
        serviceVersion = ServiceVersion()

        # dataselect
        if self._serveDataSelect:
            dataselect = ListingResource()
            prefix.putChild('dataselect', dataselect)
            dataselect1 = DirectoryResource(os.path.join(shareDir,
                                            'dataselect.html'))
            dataselect.putChild('1', dataselect1)
            dataselect1.putChild('query', FDSNDataSelect(dataSelectInv))
            msg = 'authorization for restricted time series data required'
            # restricted data goes through an authenticating session wrapper
            authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
            dataselect1.putChild('queryauth', authSession)
            dataselect1.putChild('version', serviceVersion)
            fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
            fileRes.childNotFound = NoResource()
            dataselect1.putChild('application.wadl', fileRes)
            fileRes = static.File(os.path.join(shareDir,
                                  'dataselect-builder.html'))
            fileRes.childNotFound = NoResource()
            dataselect1.putChild('builder', fileRes)
            if self._authEnabled:
                dataselect1.putChild('auth', AuthResource(self._authGnupgHome,
                                                          self._userdb))

        # event
        if self._serveEvent:
            event = ListingResource()
            prefix.putChild('event', event)
            event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
            event.putChild('1', event1)
            event1.putChild('query', FDSNEvent(self._hideAuthor,
                            self._evaluationMode, self._eventTypeWhitelist,
                            self._eventTypeBlacklist))
            fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
            fileRes.childNotFound = NoResource()
            event1.putChild('catalogs', fileRes)
            fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
            fileRes.childNotFound = NoResource()
            event1.putChild('contributors', fileRes)
            event1.putChild('version', serviceVersion)
            fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
            fileRes.childNotFound = NoResource()
            event1.putChild('application.wadl', fileRes)
            fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
            fileRes.childNotFound = NoResource()
            event1.putChild('builder', fileRes)

        # station
        if self._serveStation:
            station = ListingResource()
            prefix.putChild('station', station)
            station1 = DirectoryResource(os.path.join(shareDir,
                                         'station.html'))
            station.putChild('1', station1)
            station1.putChild('query', FDSNStation(stationInv,
                              self._allowRestricted, self._queryObjects))
            station1.putChild('version', serviceVersion)
            fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
            fileRes.childNotFound = NoResource()
            station1.putChild('application.wadl', fileRes)
            fileRes = static.File(os.path.join(shareDir,
                                  'station-builder.html'))
            fileRes.childNotFound = NoResource()
            station1.putChild('builder', fileRes)

        # static files
        fileRes = static.File(os.path.join(shareDir, 'js'))
        fileRes.childNotFound = NoResource()
        fileRes.hideInListing = True
        prefix.putChild('js', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'css'))
        fileRes.childNotFound = NoResource()
        fileRes.hideInListing = True
        prefix.putChild('css', fileRes)

        retn = False
        try:
            # start listen for incoming request
            reactor.listenTCP(self._port, Site(root), self._connections,
                              self._listenAddress)

            # start processing
            Logging.info("start listening")
            log.addObserver(logSC3)

            reactor.run()
            retn = True
        except Exception, e:
            Logging.error(str(e))
    def sh2proc(self, file):
        """Convert a Seismic Handler event file to SeisComP EventParameters.

        Reads `file` line by line ('key : value' pairs, phase blocks
        terminated by '--- end of phase ---'), creating one Event and one
        Origin plus per-phase Picks, Arrivals, Amplitudes and
        StationMagnitudes, and accumulating network magnitudes (mb, ML,
        Ms(BB), mB).

        Returns the populated DataModel.EventParameters object, or None if
        an unexpected error occurs while parsing a line. If latitude,
        longitude or origin time are missing, event and origin are NOT
        added and a warning is logged (picks already added remain in ep).

        NOTE(review): `arrival`, `phase`, `ampPeriod` and `ampBBPeriod` are
        bound only when the corresponding keys appear earlier in the phase
        block — a malformed block can raise NameError, which the per-line
        handlers do not cover once 'end of phase' is reached.
        """
        ep = DataModel.EventParameters()
        origin = DataModel.Origin.Create()
        event = DataModel.Event.Create()

        origin.setCreationInfo(DataModel.CreationInfo())
        origin.creationInfo().setCreationTime(Core.Time.GMT())

        # origin-level state collected across the whole file
        originQuality = None
        originCE = None
        latFound = False
        lonFound = False
        depthError = None
        originComments = {}

        # variables, reset after 'end of phase'
        pick = None
        stationMag = None
        staCode = None
        compCode = None
        stationMagBB = None

        amplitudeDisp = None
        amplitudeVel = None
        amplitudeSNR = None
        amplitudeBB = None

        # network magnitudes, created lazily on first contribution
        magnitudeMB = None
        magnitudeML = None
        magnitudeMS = None
        magnitudeBB = None

        km2degFac = 1.0 / Math.deg2km(1.0)

        # read file line by line, split key and value at colon
        iLine = 0
        for line in file:
            iLine += 1
            a = line.split(':', 1)
            key = a[0].strip()
            keyLower = key.lower()
            value = None

            # empty line
            if len(keyLower) == 0:
                continue

            # end of phase
            elif keyLower == '--- end of phase ---':
                if pick is None:
                    Logging.warning('Line %i: found empty phase block' % iLine)
                    continue

                if staCode is None or compCode is None:
                    Logging.warning('Line %i: end of phase, stream code '
                                    'incomplete' % iLine)
                    continue

                if staCode not in self.streams:
                    Logging.warning('Line %i: end of phase, station code %s '
                                    'not found in inventory' % (iLine, staCode))
                    continue

                if compCode not in self.streams[staCode]:
                    Logging.warning('Line %i: end of phase, component %s of '
                                    'station %s not found in inventory' % (
                                        iLine, compCode, staCode))
                    continue

                streamID = self.streams[staCode][compCode]

                pick.setWaveformID(streamID)
                ep.add(pick)

                arrival.setPickID(pick.publicID())
                arrival.setPhase(phase)
                origin.add(arrival)

                if amplitudeSNR is not None:
                    amplitudeSNR.setPickID(pick.publicID())
                    amplitudeSNR.setWaveformID(streamID)
                    ep.add(amplitudeSNR)

                if amplitudeBB is not None:
                    amplitudeBB.setPickID(pick.publicID())
                    amplitudeBB.setWaveformID(streamID)
                    ep.add(amplitudeBB)

                if stationMagBB is not None:
                    stationMagBB.setWaveformID(streamID)
                    origin.add(stationMagBB)
                    stationMagContrib = \
                        DataModel.StationMagnitudeContribution()
                    stationMagContrib.setStationMagnitudeID(
                        stationMagBB.publicID())
                    if magnitudeBB is None:
                        magnitudeBB = DataModel.Magnitude.Create()
                    magnitudeBB.add(stationMagContrib)

                if stationMag is not None:
                    # attach the matching amplitude (displacement for
                    # mb/ML, velocity for Ms(BB)) to the station magnitude
                    if stationMag.type() in ['mb', 'ML'] and \
                            amplitudeDisp is not None:
                        amplitudeDisp.setPickID(pick.publicID())
                        amplitudeDisp.setWaveformID(streamID)
                        amplitudeDisp.setPeriod(
                            DataModel.RealQuantity(ampPeriod))
                        amplitudeDisp.setType(stationMag.type())
                        ep.add(amplitudeDisp)

                    if stationMag.type() in ['Ms(BB)'] and \
                            amplitudeVel is not None:
                        amplitudeVel.setPickID(pick.publicID())
                        amplitudeVel.setWaveformID(streamID)
                        amplitudeVel.setPeriod(
                            DataModel.RealQuantity(ampPeriod))
                        amplitudeVel.setType(stationMag.type())
                        ep.add(amplitudeVel)

                    stationMag.setWaveformID(streamID)
                    origin.add(stationMag)

                    stationMagContrib = \
                        DataModel.StationMagnitudeContribution()
                    stationMagContrib.setStationMagnitudeID(
                        stationMag.publicID())

                    magType = stationMag.type()
                    if magType == 'ML':
                        if magnitudeML is None:
                            magnitudeML = DataModel.Magnitude.Create()
                        magnitudeML.add(stationMagContrib)

                    elif magType == 'Ms(BB)':
                        if magnitudeMS is None:
                            magnitudeMS = DataModel.Magnitude.Create()
                        magnitudeMS.add(stationMagContrib)

                    elif magType == 'mb':
                        if magnitudeMB is None:
                            magnitudeMB = DataModel.Magnitude.Create()
                        magnitudeMB.add(stationMagContrib)

                # reset per-phase state for the next block
                pick = None
                staCode = None
                compCode = None
                stationMag = None
                stationMagBB = None
                amplitudeDisp = None
                amplitudeVel = None
                amplitudeSNR = None
                amplitudeBB = None
                continue

            # empty key
            elif len(a) == 1:
                Logging.warning('Line %i: key without value' % iLine)
                continue

            value = a[1].strip()

            if pick is None:
                pick = DataModel.Pick.Create()
                arrival = DataModel.Arrival()

            try:
                ##############################################################
                # station parameters

                # station code
                if keyLower == 'station code':
                    staCode = value

                # pick time
                elif keyLower == 'onset time':
                    pick.setTime(DataModel.TimeQuantity(self.parseTime(value)))

                # pick onset type
                elif keyLower == 'onset type':
                    found = False
                    for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                                  DataModel.QUESTIONABLE]:
                        if value == DataModel.EPickOnsetNames_name(onset):
                            pick.setOnset(onset)
                            found = True
                            break
                    if not found:
                        raise Exception('Unsupported onset value')

                # phase code
                elif keyLower == 'phase name':
                    phase = DataModel.Phase()
                    phase.setCode(value)
                    pick.setPhaseHint(phase)

                # event type
                elif keyLower == 'event type':
                    evttype = EventTypes[value]
                    event.setType(evttype)
                    originComments[key] = value

                # filter ID
                elif keyLower == 'applied filter':
                    pick.setFilterID(value)

                # channel code, prepended by configured Channel prefix if only
                # one character is found
                elif keyLower == 'component':
                    compCode = value

                # pick evaluation mode
                elif keyLower == 'pick type':
                    found = False
                    for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                        if value == DataModel.EEvaluationModeNames_name(mode):
                            pick.setEvaluationMode(mode)
                            found = True
                            break
                    if not found:
                        raise Exception('Unsupported evaluation mode value')

                # pick author
                elif keyLower == 'analyst':
                    creationInfo = DataModel.CreationInfo()
                    creationInfo.setAuthor(value)
                    pick.setCreationInfo(creationInfo)

                # pick polarity
                # isn't tested
                # NOTE(review): setPolarity() is fed float('0'/'1'/'2') —
                # presumably the numeric values of the polarity enum; verify
                # against the DataModel Python bindings.
                elif keyLower == 'sign':
                    if value == 'positive':
                        sign = '0'  # positive
                    elif value == 'negative':
                        sign = '1'  # negative
                    else:
                        sign = '2'  # unknown
                    pick.setPolarity(float(sign))

                # arrival weight
                elif keyLower == 'weight':
                    arrival.setWeight(float(value))

                # arrival azimuth
                elif keyLower == 'theo. azimuth (deg)':
                    arrival.setAzimuth(float(value))

                # pick theo backazimuth
                elif keyLower == 'theo. backazimuth (deg)':
                    if pick.slownessMethodID() == 'corrected':
                        Logging.debug('Line %i: ignoring parameter: %s' % (
                            iLine, key))
                    else:
                        pick.setBackazimuth(
                            DataModel.RealQuantity(float(value)))
                        pick.setSlownessMethodID('theoretical')

                # pick beam slowness
                elif keyLower == 'beam-slowness (sec/deg)':
                    if pick.slownessMethodID() == 'corrected':
                        Logging.debug('Line %i: ignoring parameter: %s' % (
                            iLine, key))
                    else:
                        pick.setHorizontalSlowness(
                            DataModel.RealQuantity(float(value)))
                        pick.setSlownessMethodID('Array Beam')

                # pick beam backazimuth
                elif keyLower == 'beam-azimuth (deg)':
                    if pick.slownessMethodID() == 'corrected':
                        Logging.debug('Line %i: ignoring parameter: %s' % (
                            iLine, key))
                    else:
                        pick.setBackazimuth(
                            DataModel.RealQuantity(float(value)))

                # pick epi slowness
                elif keyLower == 'epi-slowness (sec/deg)':
                    pick.setHorizontalSlowness(
                        DataModel.RealQuantity(float(value)))
                    pick.setSlownessMethodID('corrected')

                # pick epi backazimuth
                elif keyLower == 'epi-azimuth (deg)':
                    pick.setBackazimuth(DataModel.RealQuantity(float(value)))

                # arrival distance degree
                elif keyLower == 'distance (deg)':
                    arrival.setDistance(float(value))

                # arrival distance km, recalculates for degree
                elif keyLower == 'distance (km)':
                    if isinstance(arrival.distance(), float):
                        Logging.debug('Line %i: ignoring parameter: %s' % (
                            iLine-1, 'distance (deg)'))
                    arrival.setDistance(float(value) * km2degFac)

                # arrival time residual
                elif keyLower == 'residual time':
                    arrival.setTimeResidual(float(value))

                # amplitude snr
                elif keyLower == 'signal/noise':
                    amplitudeSNR = DataModel.Amplitude.Create()
                    amplitudeSNR.setType('SNR')
                    amplitudeSNR.setAmplitude(
                        DataModel.RealQuantity(float(value)))

                # amplitude period
                elif keyLower.startswith('period'):
                    ampPeriod = float(value)

                # amplitude value for displacement
                elif keyLower == 'amplitude (nm)':
                    amplitudeDisp = DataModel.Amplitude.Create()
                    amplitudeDisp.setAmplitude(
                        DataModel.RealQuantity(float(value)))
                    amplitudeDisp.setUnit('nm')

                # amplitude value for velocity
                elif keyLower.startswith('vel. amplitude'):
                    amplitudeVel = DataModel.Amplitude.Create()
                    amplitudeVel.setAmplitude(
                        DataModel.RealQuantity(float(value)))
                    amplitudeVel.setUnit('nm/s')

                elif keyLower == 'bb amplitude (nm/sec)':
                    amplitudeBB = DataModel.Amplitude.Create()
                    amplitudeBB.setAmplitude(
                        DataModel.RealQuantity(float(value)))
                    amplitudeBB.setType('mB')
                    amplitudeBB.setUnit('nm/s')
                    amplitudeBB.setPeriod(DataModel.RealQuantity(ampBBPeriod))

                elif keyLower == 'bb period (sec)':
                    ampBBPeriod = float(value)

                elif keyLower == 'broadband magnitude':
                    magType = self.parseMagType('bb')
                    stationMagBB = DataModel.StationMagnitude.Create()
                    stationMagBB.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    stationMagBB.setType(magType)
                    stationMagBB.setAmplitudeID(amplitudeBB.publicID())

                # ignored
                elif keyLower == 'quality number':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                # station magnitude value and type
                elif keyLower.startswith('magnitude '):
                    magType = self.parseMagType(key[10:])
                    stationMag = DataModel.StationMagnitude.Create()
                    stationMag.setMagnitude(
                        DataModel.RealQuantity(float(value)))

                    if len(magType) > 0:
                        stationMag.setType(magType)
                    if magType == 'mb':
                        stationMag.setAmplitudeID(amplitudeDisp.publicID())

                    elif magType == 'MS(BB)':
                        stationMag.setAmplitudeID(amplitudeVel.publicID())
                    else:
                        Logging.debug('Line %i: Magnitude Type not known %s.'
                                      % (iLine, magType))

                ###############################################################
                # origin parameters

                # event ID, added as origin comment later on
                elif keyLower == 'event id':
                    originComments[key] = value

                # magnitude value and type
                elif keyLower == 'mean bb magnitude':
                    magType = self.parseMagType('bb')
                    if magnitudeBB is None:
                        magnitudeBB = DataModel.Magnitude.Create()
                    magnitudeBB.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeBB.setType(magType)

                elif keyLower.startswith('mean magnitude '):
                    magType = self.parseMagType(key[15:])

                    if magType == 'ML':
                        if magnitudeML is None:
                            magnitudeML = DataModel.Magnitude.Create()
                        magnitudeML.setMagnitude(
                            DataModel.RealQuantity(float(value)))
                        magnitudeML.setType(magType)

                    elif magType == 'Ms(BB)':
                        if magnitudeMS is None:
                            magnitudeMS = DataModel.Magnitude.Create()
                        magnitudeMS.setMagnitude(
                            DataModel.RealQuantity(float(value)))
                        magnitudeMS.setType(magType)

                    elif magType == 'mb':
                        if magnitudeMB is None:
                            magnitudeMB = DataModel.Magnitude.Create()
                        magnitudeMB.setMagnitude(
                            DataModel.RealQuantity(float(value)))
                        magnitudeMB.setType(magType)

                    else:
                        Logging.warning(
                            'Line %i: Magnitude type %s not defined yet.'
                            % (iLine, magType))

                # latitude
                elif keyLower == 'latitude':
                    origin.latitude().setValue(float(value))
                    latFound = True
                elif keyLower == 'error in latitude (km)':
                    origin.latitude().setUncertainty(float(value))

                # longitude
                elif keyLower == 'longitude':
                    origin.longitude().setValue(float(value))
                    lonFound = True
                elif keyLower == 'error in longitude (km)':
                    origin.longitude().setUncertainty(float(value))

                # depth
                elif keyLower == 'depth (km)':
                    origin.setDepth(DataModel.RealQuantity(float(value)))
                    if depthError is not None:
                        origin.depth().setUncertainty(depthError)
                elif keyLower == 'depth type':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))
                elif keyLower == 'error in depth (km)':
                    depthError = float(value)
                    # depth may not be set yet; value is kept in depthError
                    # and applied when 'depth (km)' arrives
                    try:
                        origin.depth().setUncertainty(depthError)
                    except Core.ValueException:
                        pass

                # time
                elif keyLower == 'origin time':
                    origin.time().setValue(self.parseTime(value))
                elif keyLower == 'error in origin time':
                    origin.time().setUncertainty(float(value))

                # location method
                elif keyLower == 'location method':
                    origin.setMethodID(str(value))

                # region table, added as origin comment later on
                elif keyLower == 'region table':
                    originComments[key] = value

                # region table, added as origin comment later on
                elif keyLower == 'region id':
                    originComments[key] = value

                # source region, added as origin comment later on
                elif keyLower == 'source region':
                    originComments[key] = value

                # used station count
                elif keyLower == 'no. of stations used':
                    if originQuality is None:
                        originQuality = DataModel.OriginQuality()
                    originQuality.setUsedStationCount(int(value))

                # ignored
                elif keyLower == 'reference location name':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                # confidence ellipsoid major axis
                elif keyLower == 'error ellipse major':
                    if originCE is None:
                        originCE = DataModel.ConfidenceEllipsoid()
                    originCE.setSemiMajorAxisLength(float(value))

                # confidence ellipsoid minor axis
                elif keyLower == 'error ellipse minor':
                    if originCE is None:
                        originCE = DataModel.ConfidenceEllipsoid()
                    originCE.setSemiMinorAxisLength(float(value))

                # confidence ellipsoid rotation
                elif keyLower == 'error ellipse strike':
                    if originCE is None:
                        originCE = DataModel.ConfidenceEllipsoid()
                    originCE.setMajorAxisRotation(float(value))

                # azimuthal gap
                elif keyLower == 'max azimuthal gap (deg)':
                    if originQuality is None:
                        originQuality = DataModel.OriginQuality()
                    originQuality.setAzimuthalGap(float(value))

                # creation info author
                elif keyLower == 'author':
                    origin.creationInfo().setAuthor(value)

                # creation info agency
                elif keyLower == 'source of information':
                    origin.creationInfo().setAgencyID(value)

                # earth model id
                elif keyLower == 'velocity model':
                    origin.setEarthModelID(value)

                # standard error
                elif keyLower == 'rms of residuals (sec)':
                    if originQuality is None:
                        originQuality = DataModel.OriginQuality()
                    originQuality.setStandardError(float(value))

                # ignored
                elif keyLower == 'phase flags':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                # ignored
                elif keyLower == 'location input params':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                # missing keys
                elif keyLower == 'ampl&period source':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                elif keyLower == 'location quality':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                elif keyLower == 'reference latitude':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                elif keyLower == 'reference longitude':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                elif keyLower.startswith('amplitude time'):
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))

                # unknown key
                else:
                    Logging.warning('Line %i: ignoring unknown parameter: %s'
                                    % (iLine, key))

            except ValueError as ve:
                # malformed numeric value: skip this parameter, keep parsing
                Logging.warning('Line %i: can not parse %s value' % (
                    iLine, key))
            except Exception:
                # unexpected error: abort and report failure to the caller
                Logging.error('Line %i: %s' % (
                    iLine, str(traceback.format_exc())))
                return None

        # check mandatory origin parameters before publishing event + origin
        if not latFound:
            Logging.warning('could not add origin, missing latitude parameter')
        elif not lonFound:
            Logging.warning(
                'could not add origin, missing longitude parameter')
        elif not origin.time().value().valid():
            Logging.warning(
                'could not add origin, missing origin time parameter')
        else:
            if magnitudeMB is not None:
                origin.add(magnitudeMB)
            if magnitudeML is not None:
                origin.add(magnitudeML)
            if magnitudeMS is not None:
                origin.add(magnitudeMS)
            if magnitudeBB is not None:
                origin.add(magnitudeBB)

            ep.add(event)
            ep.add(origin)

            if originQuality is not None:
                origin.setQuality(originQuality)

            if originCE is not None:
                uncertainty = DataModel.OriginUncertainty()
                uncertainty.setConfidenceEllipsoid(originCE)
                origin.setUncertainty(uncertainty)

            for k, v in originComments.items():
                comment = DataModel.Comment()
                comment.setId(k)
                comment.setText(v)
                origin.add(comment)

        return ep
    def parsePOST(self, content):
        """Parse the body of an FDSNWS POST request.

        `content` is iterated line by line. 'key=value' lines are collected
        into self._args (only keys listed in self.POSTParams are allowed;
        time and stream parameters must be given as stream lines instead).
        All other non-empty, non-comment lines must be stream lines of the
        form 'NET STA LOC CHA STARTTIME [ENDTIME]' and are appended to
        self.streams as RequestOptions objects.

        Raises ValueError on any invalid parameter or stream line, or if no
        stream line was found at all.
        """
        nLine = 0

        for line in content:
            nLine += 1
            line = line.strip()

            # ignore empty and comment lines
            if len(line) == 0 or line[0] == '#':
                continue

            # collect parameter (non stream lines)
            toks = line.split("=", 1)
            if len(toks) > 1:
                key = toks[0].strip().lower()

                isPOSTParam = False
                for p in self.POSTParams:
                    if p == key:
                        if key not in self._args:
                            self._args[key] = []
                        self._args[key].append(toks[1].strip())
                        isPOSTParam = True
                        break

                if isPOSTParam:
                    continue

                # time parameters not allowed in POST header
                for p in self.TimeParams:
                    if p == key:
                        raise ValueError, "time parameter in line %i not " \
                                          "allowed in POST request" % nLine

                # stream parameters not allowed in POST header
                for p in self.StreamParams:
                    if p == key:
                        raise ValueError, "stream parameter in line %i not " \
                                          "allowed in POST request" % nLine

                raise ValueError, "invalid parameter in line %i" % nLine

            else:
                # stream parameters
                toks = line.split()
                nToks = len(toks)
                if nToks != 5 and nToks != 6:
                    raise ValueError, "invalid number of stream components " \
                                      "in line %i" % nLine

                ro = RequestOptions()

                # net, sta, loc, cha
                ro.channel = RequestOptions.Channel()
                ro.channel.net = toks[0].split(',')
                ro.channel.sta = toks[1].split(',')
                ro.channel.loc = toks[2].split(',')
                ro.channel.cha = toks[3].split(',')

                # ChannelChars() returns invalid characters; truthy -> reject.
                # '--' is the conventional placeholder for an empty location.
                msg = "invalid %s value in line %i"
                for net in ro.channel.net:
                    if ro.ChannelChars(net):
                        raise ValueError, msg % ('network', nLine)
                for sta in ro.channel.sta:
                    if ro.ChannelChars(sta):
                        raise ValueError, msg % ('station', nLine)
                for loc in ro.channel.loc:
                    if loc != "--" and ro.ChannelChars(loc):
                        raise ValueError, msg % ('location', nLine)
                for cha in ro.channel.cha:
                    if ro.ChannelChars(cha):
                        raise ValueError, msg % ('channel', nLine)

                # start/end time
                # NOTE(review): if no format in TimeFormats matches, the
                # loop falls through silently and the time object stays at
                # its default — presumably validated downstream; confirm.
                ro.time = RequestOptions.Time()
                ro.time.start = Time()
                for fmt in RequestOptions.TimeFormats:
                    if ro.time.start.fromString(toks[4], fmt):
                        break
                logEnd = "-"
                if len(toks) > 5:
                    ro.time.end = Time()
                    for fmt in RequestOptions.TimeFormats:
                        if ro.time.end.fromString(toks[5], fmt):
                            break
                    logEnd = ro.time.end.iso()

                Logging.debug("ro: %s.%s.%s.%s %s %s" % (
                    ro.channel.net, ro.channel.sta, ro.channel.loc,
                    ro.channel.cha, ro.time.start.iso(), logEnd))
                self.streams.append(ro)

        if len(self.streams) == 0:
            raise ValueError, "at least one stream line is required"
def _processRequestText(self, req, ro, dac):
    """Write the matching station inventory to *req* in FDSN text format.

    Depending on the requested level (network, station or channel) one
    pipe-separated line per matching object is produced. Lines are
    sorted by a "code epoch" sort key before output.

    Returns True on success, False if the client disconnected or no
    matching inventory was found (HTTP 204).

    Fix: the sample rate computation previously used
    ``except ValueError as ZeroDevisionError`` which only caught
    ValueError and bound it to a misspelled name — a zero
    sampleRateDenominator raised an uncaught ZeroDivisionError. Both
    exceptions are now caught.
    """
    if req._disconnected:
        return False

    # restricted objects are skipped unless explicitly allowed
    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)

    data = ""
    lines = []

    # level = network
    if not ro.includeSta:
        data = "#Network|Description|StartTime|EndTime|TotalStations\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, True):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # at least one matching station is required
            stationFound = False
            for sta in ro.stationIter(net, False):
                if req._disconnected:
                    return False
                if self._matchStation(net, sta, ro, dac) and \
                   not (skipRestricted and utils.isRestricted(sta)):
                    stationFound = True
                    break
            if not stationFound:
                continue

            start, end = self._formatEpoch(net)
            lines.append(("%s %s" % (net.code(), start),
                          "%s|%s|%s|%s|%i\n" % (
                              net.code(), net.description(), start, end,
                              net.stationCount())))

    # level = station
    elif not ro.includeCha:
        data = "#Network|Station|Latitude|Longitude|Elevation|" \
               "SiteName|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue
            # iterate over inventory stations
            for sta in ro.stationIter(net, True):
                if req._disconnected:
                    return False
                if not self._matchStation(net, sta, ro, dac) or \
                   (skipRestricted and utils.isRestricted(sta)):
                    continue

                # optional inventory attributes raise ValueError when
                # unset; render them as empty fields
                try:
                    lat = str(sta.latitude())
                except ValueError:
                    lat = ''
                try:
                    lon = str(sta.longitude())
                except ValueError:
                    lon = ''
                try:
                    elev = str(sta.elevation())
                except ValueError:
                    elev = ''
                try:
                    desc = sta.description()
                except ValueError:
                    desc = ''

                start, end = self._formatEpoch(sta)
                lines.append(("%s.%s %s" % (net.code(), sta.code(), start),
                              "%s|%s|%s|%s|%s|%s|%s|%s\n" % (
                                  net.code(), sta.code(), lat, lon, elev,
                                  desc, start, end)))

    # level = channel (response level not supported in text format)
    else:
        data = "#Network|Station|Location|Channel|Latitude|Longitude|" \
               "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|" \
               "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue
            # iterate over inventory stations, locations, streams
            for sta in ro.stationIter(net, False):
                if req._disconnected:
                    return False
                if skipRestricted and utils.isRestricted(sta):
                    continue
                for loc in ro.locationIter(net, sta, True):
                    for stream in ro.streamIter(net, sta, loc, True, dac):
                        if skipRestricted and utils.isRestricted(stream):
                            continue

                        # optional attributes: empty field when unset
                        try:
                            lat = str(loc.latitude())
                        except ValueError:
                            lat = ''
                        try:
                            lon = str(loc.longitude())
                        except ValueError:
                            lon = ''
                        try:
                            elev = str(loc.elevation())
                        except ValueError:
                            elev = ''
                        try:
                            depth = str(stream.depth())
                        except ValueError:
                            depth = ''
                        try:
                            azi = str(stream.azimuth())
                        except ValueError:
                            azi = ''
                        try:
                            dip = str(stream.dip())
                        except ValueError:
                            dip = ''

                        desc = ''
                        try:
                            sensor = self._inv.findSensor(stream.sensor())
                            if sensor is not None:
                                desc = sensor.description()
                        except ValueError:
                            pass

                        try:
                            scale = str(stream.gain())
                        except ValueError:
                            scale = ''
                        try:
                            scaleFreq = str(stream.gainFrequency())
                        except ValueError:
                            scaleFreq = ''
                        try:
                            scaleUnit = str(stream.gainUnit())
                        except ValueError:
                            scaleUnit = ''
                        try:
                            sr = str(stream.sampleRateNumerator() /
                                     stream.sampleRateDenominator())
                        # BUGFIX: catch both unset attributes and a zero
                        # denominator (was: except ValueError as
                        # ZeroDevisionError, which never caught the
                        # division error)
                        except (ValueError, ZeroDivisionError):
                            sr = ''

                        start, end = self._formatEpoch(stream)
                        lines.append(
                            ("%s.%s.%s.%s %s" % (
                                net.code(), sta.code(), loc.code(),
                                stream.code(), start),
                             "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|"
                             "%s|%s|%s|%s|%s|%s\n" % (
                                 net.code(), sta.code(), loc.code(),
                                 stream.code(), lat, lon, elev, depth,
                                 azi, dip, desc, scale, scaleFreq,
                                 scaleUnit, sr, start, end)))

    # sort lines and append to final data string
    lines.sort(key=lambda line: line[0])
    for line in lines:
        data += line[1]

    # Return 204 if no matching inventory was found
    if len(lines) == 0:
        msg = "no matching inventory found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return False

    utils.writeTS(req, data)
    Logging.debug("%s: returned %i lines (total bytes: %i)" % (
        ro.service, len(lines), len(data)))
    utils.accessLog(req, ro, http.OK, len(data), None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load related event information from the database and export it.

    For every event already present in *ep* (EventParameters) the
    descriptions, comments, origin references, origins, magnitudes,
    arrivals and picks are loaded according to the request options *ro*
    and written to *req* through exporter *exp*.

    Returns True on success, False if the object quota was exceeded or
    the export failed.

    Fix: the arrival author removal was guarded by
    ``if self._removeAuthor:`` — a bound method that is always truthy —
    so authors were stripped from arrivals regardless of configuration.
    The guard now correctly tests the ``self._hideAuthor`` flag, making
    it consistent with every other author-removal site in this method.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # picks default to True if not explicitly requested
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                # preferred origin found: stop iterating
                dbIter.close()
            # TODO: if focal mechanisms are added make sure derived
            # origin is loaded

        objCount += e.originReferenceCount()

        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                    # preferred magnitude found: stop iterating
                    dbIter.close()

                if self._hideAuthor:
                    self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUGFIX: was "if self._removeAuthor:" (always truthy)
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False

    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestText(self, req, ro, dbq, ep):
    """Write the events in *ep* to *req* in FDSN event text format.

    For every event the preferred origin and preferred magnitude are
    fetched from the database via *dbq*; events whose preferred origin
    cannot be found are skipped with a warning. One pipe-separated line
    per event is streamed to the client as it is produced.

    Returns True on success, False if the client disconnected.
    """
    lineCount = 0

    line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
           "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
           "EventLocationName|EventType\n"
    # time format string used for the origin time column
    df = "%FT%T.%f"
    utils.writeTS(req, line)
    byteCount = len(line)

    # add related information
    for iEvent in range(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # query for preferred origin
        obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                            e.preferredOriginID())
        o = DataModel.Origin.Cast(obj)
        if o is None:
            Logging.warning("preferred origin of event '%s' not found: "
                            "%s" % (eID, e.preferredOriginID()))
            continue

        # depth — optional attribute, empty field when unset
        try:
            depth = str(o.depth().value())
        except ValueError:
            depth = ''

        # author — suppressed entirely when author hiding is configured
        if self._hideAuthor:
            author = ''
        else:
            try:
                author = o.creationInfo().author()
            except ValueError:
                author = ''

        # contributor
        try:
            contrib = e.creationInfo().agencyID()
        except ValueError:
            contrib = ''

        # query for preferred magnitude (if any)
        mType, mVal, mAuthor = '', '', ''
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                mType = m.type()
                mVal = str(m.magnitude().value())
                if self._hideAuthor:
                    mAuthor = ''
                else:
                    try:
                        mAuthor = m.creationInfo().author()
                    except ValueError:
                        pass

        # event description — use the region name if available
        dbq.loadEventDescriptions(e)
        region = ''
        for i in range(e.eventDescriptionCount()):
            ed = e.eventDescription(i)
            if ed.type() == DataModel.REGION_NAME:
                region = ed.text()
                break

        # event type — mapped to its QuakeML string representation
        try:
            eType = DataModel.QMLTypeMapper.EventTypeToString(e.type())
        except ValueError:
            eType = ''

        if req._disconnected:
            return False
        # Catalog column is intentionally empty (||); the event public ID
        # is reused as ContributorID
        line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s|%s\n" % (
            eID, o.time().value().toString(df), o.latitude().value(),
            o.longitude().value(), depth, author, contrib, eID, mType,
            mVal, mAuthor, region, eType)
        utils.writeTS(req, line)
        lineCount += 1
        byteCount += len(line)

    # write response
    Logging.debug("%s: returned %i events (total bytes: %i) " % (
        ro.service, lineCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def _findEvents(self, ep, ro, dbq):
    """Build and execute the SQL event query and add matches to *ep*.

    The query string is assembled piece by piece from the request
    options *ro*: event type white/black lists, contributor, preferred
    origin join, evaluation mode, time window, bounding box, depth,
    update time and magnitude filters, followed by ORDER BY, an optional
    distance subquery (Haversine) and LIMIT/OFFSET. The exact
    concatenation order is significant. Returns None; matching events
    are added to *ep* via the object iterator.
    """
    db = Application.Instance().database()

    # shorthand: map a schema attribute to its DB column name
    def _T(name):
        return db.convertColumnName(name)

    # shorthand: format a time value for the DB backend
    def _time(time):
        return db.timeToString(time)

    orderByMag = ro.orderBy and ro.orderBy.startswith('magnitude')
    # the magnitude table is only joined when needed
    reqMag = ro.mag or orderByMag
    reqDist = ro.geo and ro.geo.bCircle
    colPID = _T('publicID')
    colTime = _T('time_value')
    colMag = _T('magnitude_value')
    if orderByMag:
        colOrderBy = "m.%s" % colMag
    else:
        colOrderBy = "o.%s" % colTime

    bBox = None
    if ro.geo:
        colLat, colLon = _T('latitude_value'), _T('longitude_value')
        if ro.geo.bBox:
            bBox = ro.geo.bBox
        else:
            # circle constraint: use its bounding box as a cheap
            # pre-filter; the exact distance is checked in a subquery
            bBox = ro.geo.bCircle.calculateBBox()

    # SELECT --------------------------------
    q = "SELECT DISTINCT pe.%s, e.*, %s" % (colPID, colOrderBy)
    if reqDist:
        # Great circle distance calculated by Haversine formula
        c = ro.geo.bCircle
        q += ", DEGREES(ACOS(" \
             "COS(RADIANS(o.%s)) * COS(RADIANS(%s)) * " \
             "COS(RADIANS(o.%s) - RADIANS(%s)) + SIN(RADIANS(o.%s)) * " \
             "SIN(RADIANS(%s)))) AS distance" % (
                 colLat, c.lat, colLon, c.lon, colLat, c.lat)

    # FROM ----------------------------------
    q += " FROM Event AS e, PublicObject AS pe" \
         ", Origin AS o, PublicObject AS po"
    if reqMag:
        q += ", Magnitude AS m, PublicObject AS pm"

    # WHERE ---------------------------------
    q += " WHERE e._oid = pe._oid"

    # event type white list filter, defined via configuration and/or request
    # parameters
    types = None
    if self._eventTypeWhitelist and ro.eventTypes:
        types = self._eventTypeWhitelist.intersection(ro.eventTypes)
        if not types:
            # nothing can match: bail out without querying
            Logging.debug('all requested event types filtered by '
                          'configured event type white list')
            return
    elif self._eventTypeWhitelist:
        types = self._eventTypeWhitelist
    elif ro.eventTypes:
        types = ro.eventTypes
    if types is not None:
        # -1 encodes "unset type" and is matched via IS NULL
        allowNull = -1 in types
        types = [x for x in types if x >= 0]

        etqIn = "e.%s IN ('%s')" % (_T('type'), "', '".join(
            DataModel.EEventTypeNames.name(x) for x in types))
        if allowNull:
            etqNull = "e.%s is NULL" % _T('type')
            if types:
                q += " AND (%s OR %s)" % (etqNull, etqIn)
            else:
                q += " AND %s" % etqNull
        else:
            q += " AND %s" % etqIn

    # event type black list filter, defined in configuration
    if self._eventTypeBlacklist:
        # NULL-typed events pass unless -1 is blacklisted
        allowNull = -1 not in self._eventTypeBlacklist
        types = [x for x in self._eventTypeBlacklist if x >= 0]

        etqNotIn = "e.%s NOT IN ('%s')" % (_T('type'), "', '".join(
            DataModel.EEventTypeNames.name(x) for x in types))
        if allowNull:
            etqNull = "e.%s is NULL" % _T('type')
            if types:
                q += " AND (%s OR %s)" % (etqNull, etqNotIn)
            else:
                q += " AND %s" % etqNull
        else:
            q += " AND %s" % etqNotIn

    # event agency id filter
    if ro.contributors:
        q += " AND e.%s AND upper(e.%s) IN('%s')" % (
            _T('creationinfo_used'), _T('creationinfo_agencyid'),
            "', '".join(ro.contributors).upper())

    # origin information filter: join the preferred origin
    q += " AND o._oid = po._oid AND po.%s = e.%s" % (
        colPID, _T('preferredOriginID'))

    # evaluation mode config parameter
    if self._evaluationMode is not None:
        colEvalMode = _T('evaluationMode')
        q += " AND o.%s = '%s'" % (colEvalMode,
                                   DataModel.EEvaluationModeNames.name(
                                       self._evaluationMode))

    # time — compared in two parts: string time plus microseconds column
    if ro.time:
        colTimeMS = _T('time_value_ms')
        if ro.time.start is not None:
            t = _time(ro.time.start)
            ms = ro.time.start.microseconds()
            q += " AND (o.%s > '%s' OR (o.%s = '%s' AND o.%s >= %i))" % (
                colTime, t, colTime, t, colTimeMS, ms)
        if ro.time.end is not None:
            t = _time(ro.time.end)
            ms = ro.time.end.microseconds()
            q += " AND (o.%s < '%s' OR (o.%s = '%s' AND o.%s <= %i))" % (
                colTime, t, colTime, t, colTimeMS, ms)

    # bounding box
    if bBox:
        if bBox.minLat is not None:
            q += " AND o.%s >= %s" % (colLat, bBox.minLat)
        if bBox.maxLat is not None:
            q += " AND o.%s <= %s" % (colLat, bBox.maxLat)
        if bBox.dateLineCrossing():
            # longitude range wraps around +/-180: use OR instead of AND
            q += " AND (o.%s >= %s OR o.%s <= %s)" % (colLon, bBox.minLon,
                                                      colLon, bBox.maxLon)
        else:
            if bBox.minLon is not None:
                q += " AND o.%s >= %s" % (colLon, bBox.minLon)
            if bBox.maxLon is not None:
                q += " AND o.%s <= %s" % (colLon, bBox.maxLon)

    # depth
    if ro.depth:
        q += " AND o.%s" % _T("depth_used")
        colDepth = _T('depth_value')
        if ro.depth.min is not None:
            q += " AND o.%s >= %s" % (colDepth, ro.depth.min)
        if ro.depth.max is not None:
            q += " AND o.%s <= %s" % (colDepth, ro.depth.max)

    # updated after — matches either creation or modification time
    if ro.updatedAfter:
        t = _time(ro.updatedAfter)
        ms = ro.updatedAfter.microseconds()
        colCTime = _T('creationinfo_creationtime')
        colCTimeMS = _T('creationinfo_creationtime_ms')
        colMTime = _T('creationinfo_modificationtime')
        colMTimeMS = _T('creationinfo_modificationtime_ms')
        tFilter = "(o.%s > '%s' OR (o.%s = '%s' AND o.%s > %i))"

        q += " AND ("
        q += tFilter % (colCTime, t, colCTime, t, colCTimeMS, ms) + " OR "
        q += tFilter % (colMTime, t, colMTime, t, colMTimeMS, ms) + ")"

    # magnitude information filter
    if reqMag:
        q += " AND m._oid = pm._oid AND "
        if ro.mag and ro.mag.type:
            # join magnitude table on oID of origin and magnitude type
            q += "m._parent_oid = o._oid AND m.%s = '%s'" % (
                _T('type'), dbq.toString(ro.mag.type))
        else:
            # join magnitude table on preferred magnitude id of event
            q += "pm.%s = e.%s" % (colPID, _T('preferredMagnitudeID'))

        if ro.mag and ro.mag.min is not None:
            q += " AND m.%s >= %s" % (colMag, ro.mag.min)
        if ro.mag and ro.mag.max is not None:
            q += " AND m.%s <= %s" % (colMag, ro.mag.max)

    # ORDER BY ------------------------------
    q += " ORDER BY %s" % colOrderBy
    if ro.orderBy and ro.orderBy.endswith('-asc'):
        q += " ASC"
    else:
        q += " DESC"

    # SUBQUERY distance (optional) ----------
    if reqDist:
        # exact distance filter applied on top of the bounding-box
        # pre-filtered result set
        q = "SELECT * FROM (%s) AS subquery WHERE distance " % q
        c = ro.geo.bCircle
        if c.minRad is not None:
            q += ">= %s" % c.minRad
        if c.maxRad is not None:
            if c.minRad is not None:
                q += " AND distance "
            q += "<= %s" % c.maxRad

    # LIMIT/OFFSET --------------------------
    if ro.limit is not None or ro.offset is not None:
        # Postgres allows to omit the LIMIT parameter for offsets, MySQL
        # does not. According to the MySQL manual a very large number should
        # be used for this case.
        l = DBMaxUInt
        if ro.limit is not None:
            l = ro.limit
        q += " LIMIT %i" % l
        if ro.offset is not None:
            q += " OFFSET %i" % ro.offset

    Logging.debug("event query: %s" % q)

    for e in dbq.getObjectIterator(q, DataModel.Event.TypeInfo()):
        ep.add(DataModel.Event.Cast(e))
def run(self):
    """Start the FDSN web service application.

    Logs the effective configuration, loads and filters the inventory
    needed by the DataSelect and Station services, builds the twisted
    resource tree under /fdsnws/ and enters the reactor loop.

    Returns False on configuration or filter errors; otherwise blocks in
    reactor.run(). NOTE(review): no explicit return after the reactor
    loop is visible in this chunk — confirm against the full file.
    """
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)
    stationFilterStr = "<None>"
    if self._stationFilter is not None:
        stationFilterStr = self._stationFilter
    dataSelectFilterStr = "<None>"
    if self._dataSelectFilter is not None:
        dataSelectFilterStr = self._dataSelectFilter
    # dump the complete effective configuration to the debug log
    Logging.debug("\n" \
                  "configuration read:\n" \
                  " serve\n" \
                  " dataselect : %s\n" \
                  " event : %s\n" \
                  " station : %s\n" \
                  " listenAddress : %s\n" \
                  " port : %i\n" \
                  " connections : %i\n" \
                  " htpasswd : %s\n" \
                  " accessLog : %s\n" \
                  " queryObjects : %i\n" \
                  " realtimeGap : %s\n" \
                  " samples (M) : %s\n" \
                  " allowRestricted : %s\n" \
                  " useArclinkAccess: %s\n" \
                  " hideAuthor : %s\n" \
                  " evaluationMode : %s\n" \
                  " eventType\n" \
                  " whitelist : %s\n" \
                  " blacklist : %s\n" \
                  " inventory filter\n" \
                  " station : %s\n" \
                  " dataSelect : %s\n" \
                  " debug enabled : %s\n" \
                  " trackdb\n" \
                  " enabled : %s\n" \
                  " defaultUser : %s\n" \
                  " auth\n" \
                  " enabled : %s\n" \
                  " gnupgHome : %s\n" % (
                      self._serveDataSelect, self._serveEvent,
                      self._serveStation, self._listenAddress, self._port,
                      self._connections, self._htpasswd,
                      self._accessLogFile, self._queryObjects,
                      self._realtimeGap, self._samplesM,
                      self._allowRestricted, self._useArclinkAccess,
                      self._hideAuthor, modeStr, whitelistStr,
                      blacklistStr, stationFilterStr, dataSelectFilterStr,
                      self._debugFilter, self._trackdbEnabled,
                      self._trackdbDefaultUser, self._authEnabled,
                      self._authGnupgHome))

    # at least one service must be enabled
    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return False

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # load inventory needed by DataSelect and Station service
    stationInv = dataSelectInv = None
    if self._serveDataSelect or self._serveStation:
        retn = False
        stationInv = dataSelectInv = Inventory.Instance().inventory()
        Logging.info("inventory loaded")

        if self._serveDataSelect and self._serveStation:
            # clone inventory if station and dataSelect filter are distinct
            # else share inventory between both services
            if self._stationFilter != self._dataSelectFilter:
                dataSelectInv = self._cloneInventory(stationInv)
                retn = self._filterInventory(
                    stationInv, self._stationFilter, "station") and \
                    self._filterInventory(
                        dataSelectInv, self._dataSelectFilter,
                        "dataSelect")
            else:
                retn = self._filterInventory(stationInv,
                                             self._stationFilter)
        elif self._serveStation:
            retn = self._filterInventory(stationInv, self._stationFilter)
        else:
            retn = self._filterInventory(dataSelectInv,
                                         self._dataSelectFilter)

        if not retn:
            return False

    if self._serveDataSelect:
        # initialize access control from SC3 routing information
        self._access.initFromSC3Routing(self.query().loadRouting())

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()
    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # right now service version is shared by all services
    serviceVersion = ServiceVersion()

    # dataselect — /fdsnws/dataselect/1/...
    if self._serveDataSelect:
        dataselect = ListingResource()
        prefix.putChild('dataselect', dataselect)
        dataselect1 = DirectoryResource(
            os.path.join(shareDir, 'dataselect.html'))
        dataselect.putChild('1', dataselect1)
        dataselect1.putChild('query', FDSNDataSelect(dataSelectInv))
        msg = 'authorization for restricted time series data required'
        authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'dataselect-builder.html'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('builder', fileRes)

        if self._authEnabled:
            dataselect1.putChild(
                'auth', AuthResource(self._authGnupgHome, self._userdb))

    # event — /fdsnws/event/1/...
    if self._serveEvent:
        event = ListingResource()
        prefix.putChild('event', event)
        event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
        event.putChild('1', event1)
        event1.putChild(
            'query', FDSNEvent(self._hideAuthor, self._evaluationMode,
                               self._eventTypeWhitelist,
                               self._eventTypeBlacklist))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('contributors', fileRes)
        event1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource()
        event1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
        fileRes.childNotFound = NoResource()
        event1.putChild('builder', fileRes)

    # station — /fdsnws/station/1/...
    if self._serveStation:
        station = ListingResource()
        prefix.putChild('station', station)
        station1 = DirectoryResource(os.path.join(shareDir,
                                                  'station.html'))
        station.putChild('1', station1)
        station1.putChild(
            'query', FDSNStation(stationInv, self._allowRestricted,
                                 self._queryObjects))
        station1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource()
        station1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'station-builder.html'))
        fileRes.childNotFound = NoResource()
        station1.putChild('builder', fileRes)

    # static files
    fileRes = static.File(os.path.join(shareDir, 'js'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('js', fileRes)

    fileRes = static.File(os.path.join(shareDir, 'css'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('css', fileRes)

    retn = False
    try:
        # start listen for incoming request
        reactor.listenTCP(self._port, Site(root), self._connections,
                          self._listenAddress)

        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)

        reactor.run()
        retn = True
    except Exception, e:
        Logging.error(str(e))
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load related event information from the database and export it.

    For every event already present in *ep* the descriptions, comments,
    origin and focal mechanism references, focal mechanisms (including
    moment tensors), origins, magnitudes, arrivals and picks are loaded
    according to the request options *ro* and written to *req* through
    exporter *exp*.

    Returns True on success, False if the object quota was exceeded or
    the export failed.

    Fixes:
    - arrival author removal was guarded by ``if self._removeAuthor:``
      (a bound method, always truthy) instead of the ``self._hideAuthor``
      flag, stripping authors unconditionally;
    - moment tensor comments were loaded inside a redundant inner loop
      (``for iMT in range(fm.momentTensorCount()):``) that re-loaded the
      comments of the same ``mt`` N times; they are now loaded once per
      moment tensor.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not self.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # picks default to True if not explicitly requested
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in range(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        originIDs = set()
        magIDs = set()
        magIDs.add(e.preferredMagnitudeID())

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not self.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
                originIDs.add(oRef.originID())
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                originIDs.add(oRef.originID())
                # preferred origin found: stop iterating
                dbIter.close()

        objCount += e.originReferenceCount()

        # focalMechanism references: either none, preferred only or all
        if ro.fm or ro.allFMs:
            dbIter = dbq.getObjects(
                e, DataModel.FocalMechanismReference.TypeInfo())
            for obj in dbIter:
                fmRef = DataModel.FocalMechanismReference.Cast(obj)
                if fmRef is None:
                    continue
                if ro.allFMs:
                    e.add(fmRef)
                elif fmRef.focalMechanismID() == \
                        e.preferredFocalMechanismID():
                    e.add(fmRef)
                    # preferred focal mechanism found: stop iterating
                    dbIter.close()

        objCount += e.focalMechanismReferenceCount()

        if not self.checkObjects(req, objCount, maxObj):
            return False

        # focal mechanisms: process before origins to add derived origin to
        # originID list since it may be missing from origin reference list
        for iFMRef in range(e.focalMechanismReferenceCount()):
            if req._disconnected:
                return False
            fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
            obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
            fm = DataModel.FocalMechanism.Cast(obj)
            if fm is None:
                continue

            ep.add(fm)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(fm)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, fm)

            # momentTensors
            objCount += dbq.loadMomentTensors(fm)

            if not self.checkObjects(req, objCount, maxObj):
                return False

            for iMT in range(fm.momentTensorCount()):
                mt = fm.momentTensor(iMT)

                # remember derived origin and moment magnitude IDs so
                # they are loaded later even without references
                originIDs.add(mt.derivedOriginID())
                magIDs.add(mt.momentMagnitudeID())

                if self._hideAuthor:
                    self._removeAuthor(mt)

                # BUGFIX: load the comments of this moment tensor once
                # (was a redundant inner loop over iMT re-loading the
                # same mt momentTensorCount() times)
                if ro.comments:
                    objCount += self._loadComments(dbq, mt)

                objCount += dbq.loadDataUseds(mt)
                objCount += dbq.loadMomentTensorPhaseSettings(mt)
                if ro.staMTs:
                    objCount += dbq.loadMomentTensorStationContributions(
                        mt)
                    for iStaMT in range(
                            mt.momentTensorStationContributionCount()):
                        objCount += dbq.load(
                            mt.momentTensorStationContribution(iStaMT))

            if not self.checkObjects(req, objCount, maxObj):
                return False

        # find ID of origin containing preferred Magnitude
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                oID = dbq.parentPublicID(m)
                if oID:
                    originIDs.add(oID)

        # origins
        for oID in sorted(originIDs):
            if req._disconnected:
                return False
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not self.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() in magIDs:
                    o.add(mag)
                    dbIter.close()

                if self._hideAuthor:
                    self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in range(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not self.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUGFIX: was "if self._removeAuthor:" (always truthy)
                if self._hideAuthor:
                    for iArrival in range(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in range(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not self.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not self.checkObjects(req, objCount, maxObj):
            return False
        for pickID in sorted(pickIDs):
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not self.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False

    Logging.debug("%s: returned %i events and %i origins (total "
                  "objects/chars: %i/%i)" % (ro.service, ep.eventCount(),
                                             ep.originCount(), objCount,
                                             sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
# sort lines and append to final data string lines.sort(key=lambda line: line[0]) for line in lines: data += line[1] # Return 204 if no matching inventory was found if len(lines) == 0: msg = "no matching inventory found" data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: utils.writeTS(req, data) return False utils.writeTS(req, data) Logging.debug("%s: returned %i lines (total bytes: %i)" % (ro.service, len(lines), len(data))) utils.accessLog(req, ro, http.OK, len(data), None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod def _matchStation(net, sta, ro, dac): # No filter: return true immediately if dac is None and \ (not ro.channel or (not ro.channel.loc and not ro.channel.cha)): return True for loc in ro.locationIter(net, sta, False): if dac is None and not ro.channel.cha and not ro.time:
# sort lines and append to final data string lines.sort(key = lambda line: line[0]) for line in lines: data += line[1] # Return 204 if no matching inventory was found if len(lines) == 0: msg = "no matching inventory found" data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: utils.writeTS(req, data) return False utils.writeTS(req, data) Logging.debug("%s: returned %i lines (total bytes: %i)" % ( ro.service, len(lines), len(data))) utils.accessLog(req, ro, http.OK, len(data), None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod def _matchStation(net, sta, ro, dac): # No filter: return true immediately if dac is None and \ ( not ro.channel or ( not ro.channel.loc and not ro.channel.cha ) ): return True for loc in ro.locationIter(net, sta, False):
def _findEvents(self, ep, ro, dbq):
    """Build and execute the SQL event query and add matches to *ep*.

    Simpler variant without event-type white/black list support: the
    query is assembled from contributor, preferred-origin join, time,
    bounding box, depth, update time and magnitude filters, followed by
    ORDER BY, an optional Haversine distance subquery and LIMIT/OFFSET.
    Returns None; matching events are added to *ep*.
    """
    db = Application.Instance().database()

    # shorthand: map a schema attribute to its DB column name
    def _T(name):
        return db.convertColumnName(name)

    # shorthand: format a time value for the DB backend
    def _time(time):
        return db.timeToString(time)

    # the magnitude table is only joined when needed
    reqMag = ro.mag or (ro.orderBy and ro.orderBy.startswith("magnitude"))
    reqDist = ro.geo and ro.geo.bCircle
    colPID = _T("publicID")
    colTime = _T("time_value")
    colMag = _T("magnitude_value")
    if reqMag:
        colOrderBy = "m.%s" % colMag
    else:
        colOrderBy = "o.%s" % colTime

    bBox = None
    if ro.geo:
        colLat, colLon = _T("latitude_value"), _T("longitude_value")
        if ro.geo.bBox:
            bBox = ro.geo.bBox
        else:
            # circle constraint: bounding box serves as a cheap
            # pre-filter; exact distance is checked in a subquery
            bBox = ro.geo.bCircle.calculateBBox()

    # SELECT --------------------------------
    q = "SELECT DISTINCT pe.%s, e.*, %s" % (colPID, colOrderBy)
    if reqDist:
        # Great circle distance calculated by Haversine formula
        c = ro.geo.bCircle
        q += ", DEGREES(ACOS(" \
             "COS(RADIANS(o.%s)) * COS(RADIANS(%s)) * " \
             "COS(RADIANS(o.%s) - RADIANS(%s)) + SIN(RADIANS(o.%s)) * " \
             "SIN(RADIANS(%s)))) AS distance" % (
                 colLat, c.lat, colLon, c.lon, colLat, c.lat)

    # FROM ----------------------------------
    q += " FROM Event AS e, PublicObject AS pe" \
         ", Origin AS o, PublicObject AS po"
    if reqMag:
        q += ", Magnitude AS m, PublicObject AS pm"

    # WHERE ---------------------------------
    q += " WHERE e._oid = pe._oid"

    # event information filter
    if ro.contributors:
        q += " AND e.%s AND upper(e.%s) IN('%s')" % (
            _T("creationinfo_used"), _T("creationinfo_agencyid"),
            "', '".join(ro.contributors).upper())

    # origin information filter: join the preferred origin
    q += " AND o._oid = po._oid AND po.%s = e.%s" % (
        colPID, _T("preferredOriginID"))

    # time — compared in two parts: string time plus microseconds column
    if ro.time:
        colTimeMS = _T("time_value_ms")
        if ro.time.start is not None:
            t = _time(ro.time.start)
            ms = ro.time.start.microseconds()
            q += " AND (o.%s > '%s' OR (o.%s = '%s' AND o.%s >= %i)) " % (
                colTime, t, colTime, t, colTimeMS, ms)
        if ro.time.end is not None:
            t = _time(ro.time.end)
            ms = ro.time.end.microseconds()
            q += " AND (o.%s < '%s' OR (o.%s = '%s' AND o.%s <= %i))" % (
                colTime, t, colTime, t, colTimeMS, ms)

    # bounding box
    if bBox:
        if bBox.minLat is not None:
            q += " AND o.%s >= %s" % (colLat, bBox.minLat)
        if bBox.maxLat is not None:
            q += " AND o.%s <= %s" % (colLat, bBox.maxLat)
        if bBox.dateLineCrossing():
            # longitude range wraps around +/-180: use OR instead of AND
            q += " AND (o.%s >= %s OR o.%s <= %s)" % (
                colLon, bBox.minLon, colLon, bBox.maxLon)
        else:
            if bBox.minLon is not None:
                q += " AND o.%s >= %s" % (colLon, bBox.minLon)
            if bBox.maxLon is not None:
                q += " AND o.%s <= %s" % (colLon, bBox.maxLon)

    # depth
    if ro.depth:
        q += " AND o.%s" % _T("depth_used")
        colDepth = _T("depth_value")
        if ro.depth.min is not None:
            q += " AND o.%s >= %s" % (colDepth, ro.depth.min)
        if ro.depth.max is not None:
            q += " AND o.%s <= %s" % (colDepth, ro.depth.max)

    # updated after — matches either creation or modification time
    if ro.updatedAfter:
        t = _time(ro.updatedAfter)
        ms = ro.updatedAfter.microseconds()
        colCTime = _T("creationinfo_creationtime")
        colCTimeMS = _T("creationinfo_creationtime_ms")
        colMTime = _T("creationinfo_modificationtime")
        colMTimeMS = _T("creationinfo_modificationtime_ms")
        tFilter = "(o.%s > '%s' OR (o.%s = '%s' AND o.%s > %i))"

        q += " AND ("
        q += tFilter % (colCTime, t, colCTime, t, colCTimeMS, ms) + " OR "
        q += tFilter % (colMTime, t, colMTime, t, colMTimeMS, ms) + ")"

    # magnitude information filter
    if reqMag:
        q += " AND m._oid = pm._oid AND "
        if ro.mag and ro.mag.type:
            # join magnitude table on oID of origin and magnitude type
            # NOTE(review): ro.mag.type is interpolated without DB
            # escaping here (the sibling variant uses dbq.toString) —
            # verify the value is validated upstream
            q += "m._parent_oid = o._oid AND m.%s = '%s'" % (_T("type"),
                                                             ro.mag.type)
        else:
            # join magnitude table on preferred magnitude id of event
            q += "pm.%s = e.%s" % (colPID, _T("preferredMagnitudeID"))

        if ro.mag and ro.mag.min is not None:
            q += " AND m.%s >= %s" % (colMag, ro.mag.min)
        if ro.mag and ro.mag.max is not None:
            q += " AND m.%s <= %s" % (colMag, ro.mag.max)

    # ORDER BY ------------------------------
    q += " ORDER BY %s" % colOrderBy
    if ro.orderBy and ro.orderBy.endswith("-asc"):
        q += " ASC"
    else:
        q += " DESC"

    # SUBQUERY distance (optional) ----------
    if reqDist:
        # exact distance filter applied on top of the bounding-box
        # pre-filtered result set
        q = "SELECT * FROM (%s) AS subquery WHERE distance " % q
        c = ro.geo.bCircle
        if c.minRad is not None:
            q += ">= %s" % c.minRad
        if c.maxRad is not None:
            if c.minRad is not None:
                q += " AND distance "
            q += "<= %s" % c.maxRad

    # LIMIT/OFFSET --------------------------
    if ro.limit is not None or ro.offset is not None:
        # Postgres allows to omit the LIMIT parameter for offsets, MySQL
        # does not. According to the MySQL manual a very large number should
        # be used for this case.
        l = DBMaxUInt
        if ro.limit is not None:
            l = ro.limit
        else:
            # redundant (l already defaults to DBMaxUInt); kept as-is
            l = DBMaxUInt
        q += " LIMIT %i" % l
        if ro.offset is not None:
            q += " OFFSET %i" % ro.offset

    Logging.debug("event query: %s" % q)

    for e in dbq.getObjectIterator(q, DataModel.Event.TypeInfo()):
        ep.add(DataModel.Event.Cast(e))
def _processRequestExp(self, req, ro, exp, dac):
    """Build a filtered copy of the inventory and stream it to the client.

    Parameters:
        req -- Twisted request object (checked for disconnect and limits)
        ro  -- parsed request options (network/station filters, level flags)
        exp -- SeisComP exporter used to serialize the result
        dac -- data availability cache used for station/channel matching

    Returns False when the client disconnected or an object limit was
    exceeded, True otherwise (including the empty-result 204 case).
    """
    if req._disconnected:
        return False

    # counters for the object-limit check and the final debug log
    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    # disable registration so the copied objects below do not pollute
    # the global PublicObject registry
    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    # restricted objects are dropped unless the service allows them and
    # the request did not explicitly exclude them
    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    # level flags steer how deep the network/station iterators match
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    # NOTE(review): extCount is cumulative across
                    # stations, so objCount grows faster than the
                    # objects actually added -- presumably a
                    # conservative overcount; confirm intent.
                    objCount += numLoc + numCha + extCount
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # NOTE(review): staCount is cumulative across networks;
            # adding it per network overcounts objCount -- verify this
            # conservative limit behaviour is intended.
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            # object limit exceeded while copying references
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount() + \
                       newInv.responseFAPCount() + \
                       newInv.responseIIRCount()
        objCount += resCount + decCount + newInv.dataloggerCount() + \
            newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        # wrap inventory and availability into one exportable list
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                  "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/" \
                  "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                  staCount, locCount, chaCount, newInv.dataloggerCount(),
                  decCount, newInv.sensorCount(), resCount, extCount,
                  objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri)) request.setHeader("Server", "SeisComP3-WS/%s" % self._version) return server.Site.getResourceFor(self, request)
def _processRequest(self, req, ro):
    """Handle a dataselect waveform request.

    Validates unsupported options, optionally sets up request tracking,
    resolves the requested streams against the inventory (enforcing
    access restrictions and the sample-count limit), then hands the
    record stream to an asynchronous producer.

    Parameters:
        req -- Twisted request object
        ro  -- parsed dataselect request options

    Returns an error page (string) on rejection, otherwise
    server.NOT_DONE_YET since the response is produced asynchronously.
    """
    # reject options this implementation does not support
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # optional global limit on the number of samples per request
    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    trackerList = []

    if app._trackdbEnabled or app._requestLog:
        # prefer the client IP reported by a proxy, if present
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        clientID = req.getHeader("User-Agent")
        if clientID:
            # truncated to fit the tracker field size
            clientID = clientID[:80]
        else:
            clientID = "fdsnws"

    if app._trackdbEnabled:
        if ro.userName:
            userID = ro.userName
        else:
            userID = app._trackdbDefaultUser
        # request ID: milliseconds since 2015-01-01 epoch offset
        reqID = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB(clientID, app.connection(), reqID,
                                   "WAVEFORM", userID,
                                   "REQUEST WAVEFORM " + reqID,
                                   "fdsnws", userIP, req.getClientIP())
        trackerList.append(tracker)

    if app._requestLog:
        tracker = app._requestLog.tracker(ro.service, ro.userName,
                                          userIP, clientID)
        trackerList.append(tracker)

    # Open record stream
    rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

    # tri-state: None = no stream seen yet, True = only denied streams,
    # False = at least one stream was allowed
    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            netRestricted = utils.isRestricted(net)
            if not trackerList and netRestricted and not self.__user:
                # upgrade None -> True, but never overwrite False
                forbidden = forbidden or (forbidden is None)
                continue
            for sta in self._stationIter(net, s):
                staRestricted = utils.isRestricted(sta)
                if not trackerList and staRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip the requested window to the channel epoch;
                        # cha.end() raises ValueError for open epochs
                        start_time = max(cha.start(), s.time.start)
                        try:
                            end_time = min(cha.end(), s.time.end)
                        except ValueError:
                            end_time = s.time.end

                        # deny restricted streams the user may not access
                        if (netRestricted or staRestricted or
                                utils.isRestricted(cha)) and (
                                not self.__user or (self.__access and
                                not self.__access.authorize(
                                    self.__user, net.code(), sta.code(),
                                    loc.code(), cha.code(),
                                    start_time, end_time))):

                            for tracker in trackerList:
                                # temporary networks start with a digit
                                # or X/Y/Z, permanent ones do not
                                net_class = 't' if net.code()[0] \
                                    in "0123456789XYZ" else 'p'
                                tracker.line_status(
                                    start_time, end_time, net.code(),
                                    sta.code(), cha.code(), loc.code(),
                                    True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")

                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return self.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug(
                            "adding stream: %s.%s.%s.%s %s - %s" %
                            (net.code(), sta.code(), loc.code(),
                             cha.code(), start_time.iso(),
                             end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        # every matched stream was denied
        for tracker in trackerList:
            tracker.volume_status("fdsnws", "DENIED", 0, "")
            tracker.request_status("END", "")

        msg = "access denied"
        return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

    elif forbidden is None:
        # no stream matched the request at all
        for tracker in trackerList:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "no metadata found"
        return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri)) request.setHeader('Server', "SeisComP3-FDSNWS/%s" % VERSION) return server.Site.getResourceFor(self, request)
def _processRequestExp(self, req, ro, exp, dac):
    """Build a filtered copy of the inventory and stream it to the client.

    Parameters:
        req -- Twisted request object (checked for disconnect and limits)
        ro  -- parsed request options (network/station filters, level flags)
        exp -- SeisComP exporter used to serialize the result
        dac -- data availability cache used for station/channel matching

    Returns False when the client disconnected or an object limit was
    exceeded, True otherwise (including the empty-result 204 case).
    """
    if req._disconnected:
        return False

    # counters for the object-limit check and the final debug log
    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    # disable registration so the copied objects below do not pollute
    # the global PublicObject registry
    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    # restricted objects are dropped unless the service allows them and
    # the request did not explicitly exclude them
    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    # level flags steer how deep the network/station iterators match
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not self.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    # NOTE(review): extCount is cumulative across
                    # stations, so objCount grows faster than the
                    # objects actually added -- presumably a
                    # conservative overcount; confirm intent.
                    objCount += numLoc + numCha + extCount
                    if not self.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # NOTE(review): staCount is cumulative across networks;
            # adding it per network overcounts objCount -- verify this
            # conservative limit behaviour is intended.
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            # object limit exceeded while copying references
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount() + \
                       newInv.responseFAPCount() + \
                       newInv.responseIIRCount()
        objCount += resCount + decCount + newInv.dataloggerCount() + \
            newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        # wrap inventory and availability into one exportable list
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/"
        "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                            staCount, locCount, chaCount,
                            newInv.dataloggerCount(), decCount,
                            newInv.sensorCount(), resCount, extCount,
                            objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load related objects for the matched events and stream the result.

    For each event already in `ep` this loads descriptions, comments,
    origin references (all or preferred only), the referenced origins
    with their magnitudes and arrivals, and finally the picks referenced
    by those arrivals. The populated EventParameters object is then
    serialized through exporter `exp`.

    Parameters:
        req -- Twisted request object (checked for disconnect and limits)
        ro  -- parsed event request options
        dbq -- database query interface
        exp -- SeisComP exporter used to serialize the result
        ep  -- EventParameters pre-filled with the matched events

    Returns False when the client disconnected, an object limit was
    exceeded or the export failed, True otherwise.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            # FIX: was self._loadComment -- inconsistent with the
            # _loadComments helper used at every other call site
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                # preferred reference found: stop iterating early
                dbIter.close()

        # TODO: if focal mechanisms are added make sure derived
        # origin is loaded
        objCount += e.originReferenceCount()

        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                    # preferred magnitude found: stop iterating early
                    dbIter.close()

                if self._hideAuthor:
                    self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # FIX: was `if self._removeAuthor:` -- a bound method is
                # always truthy, so the author was stripped even when
                # hiding is disabled; test the _hideAuthor flag as done
                # everywhere else in this method
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False

    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True