class FDSNEvent(resource.Resource):
    isLeaf = True

    #---------------------------------------------------------------------------
    def __init__(self, hideAuthor=False, evaluationMode=None,
                 eventTypeWhitelist=None, eventTypeBlacklist=None):
        self._hideAuthor = hideAuthor
        self._evaluationMode = evaluationMode
        self._eventTypeWhitelist = eventTypeWhitelist
        self._eventTypeBlacklist = eventTypeBlacklist

    #---------------------------------------------------------------------------
    def render_GET(self, req):
        # Parse and validate GET parameters
        ro = _EventRequestOptions(req.args)
        try:
            ro.parse()
        except ValueError, e:
            Logging.warning(str(e))
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        # Catalog filter is not supported; any filter value results in 204
        if ro.catalogs:
            msg = "no matching events found"
            return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # updatedafter not implemented
        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            exp = None
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp:
                exp.setFormattedOutput(bool(ro.formatted))
            else:
                msg = "output format '%s' not available, export module '%s' " \
                      "could not be loaded." % (ro.format,
                                                ro.Exporters[ro.format])
                return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE,
                                            msg, ro)

        # Create database query
        dbq = DataModel.DatabaseQuery(Application.Instance().database())
        if dbq.hasError():
            msg = "could not connect to database: %s" % dbq.errorMsg()
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Process request in separate thread
        d = deferToThread(self._processRequest, req, ro, dbq, exp)
        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
def render_GET(self, req):
    # No archive, no service
    if not os.path.isdir(self._sdsPath):
        msg = "SDS archive not found: %s" % self._sdsPath
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg)

    # Parse and validate GET parameters
    try:
        ro = _DataSelectRequestOptions(req.args)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e))
def resumeProducing(self): rec = None try: rec = self.rsInput.next() except Exception, e: Logging.warning("%s" % str(e)) # e.g. ArchiveException if not self.initialized: self.initialized = True # read first record to test if any data exists at all if not rec: msg = "No waveform data found" self.req.write(HTTP.renderErrorPage(self.req, http.NOT_FOUND, msg)) self.req.unregisterProducer() self.req.finish() return self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed") self.req.setHeader("Content-Disposition", "attachment; filename=%s" % \ self.fileName) if not rec: self.req.unregisterProducer() self.req.finish() return self.req.write(rec.raw().str())
def _finish(self):
    if self.stopped:
        return

    if self.written == 0:
        msg = "no waveform data found"
        errorpage = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                         VERSION, self.ro)
        if errorpage:
            self.req.write(errorpage)
        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "OK", self.written, "")
            tracker.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
def resumeProducing(self):
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        Logging.warning("%s" % str(e))

    if self.written == 0:
        # read first record to test if any data exists at all
        if not rec:
            msg = "no waveform data found"
            data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                        self.ro)
            if data:
                self.req.write(data)
            self.req.unregisterProducer()
            self.req.finish()
            return

        self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
        self.req.setHeader('Content-Disposition', "attachment; " \
                           "filename=%s" % self.fileName)

    if not rec:
        self.req.unregisterProducer()
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        self.req.finish()
        return

    data = rec.raw().str()
    self.req.write(data)
    self.written += len(data)
def _processRequest(self, req, ro, dbq, exp):
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            event = dbq.getEventByPublicID(eID)
            event = DataModel.Event.Cast(event)
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)

    return self._processRequestText(req, ro, dbq, ep)
def render_GET(self, req):
    # Parse and validate GET parameters
    ro = _EventRequestOptions(req.args)
    try:
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
class _WaveformProducer:
    def __init__(self, req, ro, rs, fileName, tracker):
        self.req = req
        self.ro = ro
        self.rs = rs  # keep a reference to avoid crash
        self.rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
        self.fileName = fileName
        self.written = 0
        self.tracker = tracker

    def resumeProducing(self):
        rec = None
        try:
            rec = self.rsInput.next()
        except Exception, e:
            Logging.warning("%s" % str(e))

        if self.written == 0:
            # read first record to test if any data exists at all
            if not rec:
                msg = "no waveform data found"
                data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                            self.ro)
                if data:
                    self.req.write(data)
                self.req.unregisterProducer()
                self.req.finish()
                if self.tracker:
                    self.tracker.volume_status("fdsnws", "NODATA", 0, "")
                    self.tracker.request_status("END", "")
                return

            self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
            self.req.setHeader('Content-Disposition', "attachment; " \
                               "filename=%s" % self.fileName)

        if not rec:
            self.req.unregisterProducer()
            Logging.debug("%s: returned %i bytes of mseed data" % (
                          self.ro.service, self.written))
            utils.accessLog(self.req, self.ro, http.OK, self.written, None)
            self.req.finish()
            if self.tracker:
                self.tracker.volume_status("fdsnws", "OK", self.written, "")
                self.tracker.request_status("END", "")
            return

        data = rec.raw().str()
        self.req.write(data)
        self.written += len(data)
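# Usage sketch (illustrative, mirrors the dataselect handlers below):
# _WaveformProducer follows Twisted's producer protocol, i.e. the reactor
# calls resumeProducing() whenever it wants the next chunk. Registered as a
# pull producer (streaming=False) it is polled automatically; registered as
# a push producer (streaming=True) the caller kicks off the first record
# itself, e.g.:
#
#     prod = _WaveformProducer(req, ro, rs, fileName, tracker)
#     req.registerProducer(prod, True)
#     prod.resumeProducing()
#     return server.NOT_DONE_YET  # response is completed asynchronously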
def render_POST(self, req):
    # Parse and validate POST parameters
    ro = _StationRequestOptions()
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def _processRequest(self, req, ro, dbq, exp):
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue

            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                eType = None
                try:
                    eType = DataModel.EEventTypeNames_name(e.type())
                except ValueError:
                    pass
                if self._eventTypeWhitelist and \
                        not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                        eType in self._eventTypeBlacklist:
                    continue

            if self._evaluationMode is not None:
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                            o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueError:
                    continue

            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)

    return self._processRequestText(req, ro, dbq, ep)
def checkObjects(req, objCount, maxObj):
    if objCount > maxObj:
        msg = "The result set of your request exceeds the configured " \
              "maximum number of objects (%i). Refine your request " \
              "parameters." % maxObj
        writeTS(req, HTTP.renderErrorPage(req, http.REQUEST_ENTITY_TOO_LARGE,
                                          msg))
        return False

    return True
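# Usage sketch (illustrative): callers accumulate `objCount` while copying
# inventory or event objects and abort as soon as the configured budget is
# exceeded. checkObjects() has already written the 413 error page, so the
# caller only needs to stop producing output:
#
#     objCount += numLoc + numCha
#     if not HTTP.checkObjects(req, objCount, self._maxObj):
#         return False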
def _processRequest(self, req, ro, dbq, exp):
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue

            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                eType = None
                try:
                    eType = DataModel.EEventTypeNames_name(e.type())
                except ValueError:
                    pass
                if self._eventTypeWhitelist and \
                        not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                        eType in self._eventTypeBlacklist:
                    continue

            if self._evaluationMode is not None:
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                            o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueError:
                    continue

            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    Logging.debug("events found: %i" % ep.eventCount())

    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)

    return self._processRequestText(req, ro, dbq, ep)
def render_POST(self, req):
    # Parse and validate POST parameters
    ro = _DataSelectRequestOptions()
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_GET(self, req):
    # Parse and validate GET parameters
    ro = _StationRequestOptions(req.args)
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_GET(self, req):
    # Parse and validate GET parameters
    ro = _DataSelectRequestOptions(req.args)
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def _finish(self):
    if self.written == 0:
        msg = "no waveform data found"
        HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "NODATA", 0, "")
            self.tracker.request_status("END", "")
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "OK", self.written, "")
            self.tracker.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
def _prepareRequest(self, req, ro):
    if ro.availability:
        msg = "inclusion of availability information not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
    if ro.updatedAfter:
        msg = "filtering based on update time not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
    if ro.matchTimeSeries:
        msg = "filtering based on available time series not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    # Exporter, 'None' is used for text output
    if ro.format in ro.VText:
        if ro.includeRes:
            msg = "response level output not available in text format"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
        req.setHeader('Content-Type', 'text/plain')
        d = deferToThread(self._processRequestText, req, ro)
    else:
        exp = Exporter.Create(ro.Exporters[ro.format])
        if exp is None:
            msg = "output format '%s' not available, export module '%s' " \
                  "could not be loaded." % (ro.format,
                                            ro.Exporters[ro.format])
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
        req.setHeader('Content-Type', 'application/xml')
        exp.setFormattedOutput(bool(ro.formatted))
        d = deferToThread(self._processRequestExp, req, ro, exp)

    # Process request in separate thread
    d.addCallback(utils.onRequestServed, req)
    d.addErrback(utils.onRequestError, req)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _prepareRequest(self, req, ro):
    if ro.availability:
        msg = "inclusion of availability information not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
    if ro.updatedAfter:
        msg = "filtering based on update time not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
    if ro.matchTimeSeries:
        msg = "filtering based on available time series not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    # Exporter, 'None' is used for text output
    if ro.format in ro.VText:
        if ro.includeRes:
            msg = "response level output not available in text format"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
        req.setHeader('Content-Type', 'text/plain')
        d = deferToThread(self._processRequestText, req, ro)
    else:
        exp = Exporter.Create(ro.Exporters[ro.format])
        if exp is None:
            msg = "output format '%s' not available, export module '%s' " \
                  "could not be loaded." % (ro.format,
                                            ro.Exporters[ro.format])
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
        req.setHeader('Content-Type', 'application/xml')
        exp.setFormattedOutput(bool(ro.formatted))
        d = deferToThread(self._processRequestExp, req, ro, exp)

    req.notifyFinish().addErrback(utils.onCancel, d)
    d.addBoth(utils.onFinish, req)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _prepareRequest(self, req, ro):
    if ro.availability and not self._daEnabled:
        msg = "inclusion of availability information not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    if ro.updatedAfter:
        msg = "filtering based on update time not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    if ro.matchTimeSeries and not self._daEnabled:
        msg = "filtering based on available time series not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    # load data availability if requested
    dac = None
    if ro.availability or ro.matchTimeSeries:
        dac = Application.Instance().getDACache()
        if dac is None or len(dac.extents()) == 0:
            msg = "no data availability extent information found"
            return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Exporter, 'None' is used for text output
    if ro.format in ro.VText:
        if ro.includeRes:
            msg = "response level output not available in text format"
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
        req.setHeader('Content-Type', 'text/plain')
        d = deferToThread(self._processRequestText, req, ro, dac)
    else:
        exp = Exporter.Create(ro.Exporters[ro.format])
        if exp is None:
            msg = "output format '%s' not available, export module '%s' " \
                  "could not be loaded." % (ro.format,
                                            ro.Exporters[ro.format])
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
        req.setHeader('Content-Type', 'application/xml')
        exp.setFormattedOutput(bool(ro.formatted))
        d = deferToThread(self._processRequestExp, req, ro, exp, dac)

    req.notifyFinish().addErrback(utils.onCancel, d)
    d.addBoth(utils.onFinish, req)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
class FDSNEvent(resource.Resource):
    isLeaf = True

    #---------------------------------------------------------------------------
    def __init__(self, hideAuthor=False, evaluationMode=None,
                 eventTypeWhitelist=None, eventTypeBlacklist=None,
                 formatList=None):
        self._hideAuthor = hideAuthor
        self._evaluationMode = evaluationMode
        self._eventTypeWhitelist = eventTypeWhitelist
        self._eventTypeBlacklist = eventTypeBlacklist
        self._formatList = formatList

    #---------------------------------------------------------------------------
    def render_OPTIONS(self, req):
        req.setHeader('Access-Control-Allow-Origin', '*')
        req.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
        req.setHeader('Access-Control-Allow-Headers',
                      'Accept, Content-Type, X-Requested-With, Origin')
        req.setHeader('Content-Type', 'text/plain')
        return ""

    #---------------------------------------------------------------------------
    def render_GET(self, req):
        # Parse and validate GET parameters
        ro = _EventRequestOptions(req.args)
        try:
            ro.parse()
        except ValueError, e:
            Logging.warning(str(e))
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        # Catalog filter is not supported
        if ro.catalogs:
            msg = "catalog filter not supported"
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # updatedafter not implemented
        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if self._formatList is not None and ro.format not in self._formatList:
            msg = "output format '%s' not available" % ro.format
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            exp = None
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp:
                exp.setFormattedOutput(bool(ro.formatted))
            else:
                msg = "output format '%s' not available, export module '%s' " \
                      "could not be loaded." % (ro.format,
                                                ro.Exporters[ro.format])
                return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # Create database query
        db = DatabaseInterface.Open(Application.Instance().databaseURI())
        if db is None:
            msg = "could not connect to database"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        dbq = DataModel.DatabaseQuery(db)

        # Process request in separate thread
        d = deferToThread(self._processRequest, req, ro, dbq, exp)
        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request id: milliseconds since 2015-01-01T00:00:00Z
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid, "fdsnws",
                                   req.getClientIP(), req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        if utils.isRestricted(cha) and (self.__user is None or
                                not self.__access.authorize(self.__user,
                                    net.code(), sta.code(), loc.code(),
                                    cha.code(), s.time.start, s.time.end)):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                net.code(), sta.code(), cha.code(),
                                loc.code(), False, "", True, [], "fdsnws",
                                "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
               "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # Add request streams
    # iterate over inventory networks
    inv = Application.Instance()._inv
    for s in ro.streams:
        for net in self._networkIter(inv, s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
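# Worked example for the sample budget enforced above (illustrative numbers):
# with app._samplesM = 500 the limit is maxSamples = 500 * 1000000 = 5e8.
# One day of a 100 Hz channel (sampleRateNumerator=100, denominator=1)
# contributes int(86400 * 100 / 1) = 8640000 samples, so about 57 such
# channel-days exhaust the budget and the request is rejected with HTTP 413
# (REQUEST_ENTITY_TOO_LARGE) before any waveform data is read.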
def _processRequestExp(self, req, ro, exp):
    if req._disconnected:
        return False

    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors = set(), set()

    skipRestricted = not self._allowRestricted or \
                     (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(newNet, sta, ro,
                                                            skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv, ro,
                                        dataloggers, sensors, self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False

    Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, "
                   "%iDL, %iDec, %iSen, %iRes (total objects/bytes: "
                   "%i/%i) " % (ro.service, newInv.networkCount(), staCount,
                   locCount, chaCount, newInv.dataloggerCount(), decCount,
                   newInv.sensorCount(), resCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request id: milliseconds since 2015-01-01T00:00:00Z
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid, "fdsnws",
                                   userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip the request window to the channel epoch
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start

                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                            if tracker:
                                net_class = 't' if net.code()[0] in \
                                            "0123456789XYZ" else 'p'
                                tracker.line_status(start_time, end_time,
                                    net.code(), sta.code(), cha.code(),
                                    loc.code(), True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    # `forbidden` is tri-state: None = no stream matched at all, True = only
    # denied streams matched, False = at least one stream was authorized
    if forbidden:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "access denied"
        return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)
    elif forbidden is None:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "no metadata found"
        return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
               "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro, exp):
    if req._disconnected:
        return False

    maxObj = Application.Instance()._queryObjects
    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    inv = Application.Instance()._inv
    newInv = DataModel.Inventory()
    filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)
    dataloggers, sensors = set(), set()

    # iterate over inventory networks
    for net in utils.networkIter(inv, ro):
        if not ro.restricted and net.restricted():
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in utils.stationIter(net, ro, matchGeo=True):
            if not ro.restricted and sta.restricted():
                continue
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(newNet, sta, ro)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station is
                    # sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT,
                      "No matching inventory found", ro))
        return False

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, inv, ro,
                                        dataloggers, sensors)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    req.setHeader("Content-Type", "application/xml")
    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False

    Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                   "%iDL, %iDec, %iSen, %iRes (total objects/bytes: " \
                   "%i/%i) " % (ro.service, newInv.networkCount(), staCount,
                   locCount, chaCount, newInv.dataloggerCount(), decCount,
                   newInv.sensorCount(), resCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid, "fdsnws",
                                   req.getClientIP(), req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                net.code(), sta.code(), cha.code(),
                                loc.code(), False, "", True, [], "fdsnws",
                                "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
               "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro, dbq, exp):
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)
    maxObj = Application.Instance()._queryObjects

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            event = dbq.getEventByPublicID(eID)
            event = DataModel.Event.Cast(event)
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "No matching events found"
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False

    objCount = ep.eventCount()
    Logging.debug("events found: %i" % objCount)
    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()

    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += dbq.loadComments(e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                dbIter.close()
        objCount += e.originReferenceCount()

        # TODO: load FocalMechanismReferences???
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origins
        for iORef in xrange(e.originReferenceCount()):
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue
            ep.add(o)
            objCount += 1

            # comments
            if ro.comments:
                objCount += dbq.loadComments(o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                    dbIter.close()
            objCount += o.magnitudeCount()

            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += dbq.loadComments(o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                ep.add(pick)

    if ro.output == "csv":
        req.setHeader("Content-Type", "text/plain")
    else:
        req.setHeader("Content-Type", "application/xml")

    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False

    Logging.notice("%s: returned %i events and %i origins (total " \
                   "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                   ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid, "fdsnws",
                                   userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start

                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                            if tracker:
                                net_class = 't' if net.code()[0] in \
                                            "0123456789XYZ" else 'p'
                                tracker.line_status(start_time, end_time,
                                    net.code(), sta.code(), cha.code(),
                                    loc.code(), True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "access denied"
        return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)
    elif forbidden is None:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "no metadata found"
        return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
               "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequestExp(self, req, ro, exp, dac):
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
                     (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    objCount += numLoc + numCha + extCount
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv, ro,
                                        dataloggers, sensors, self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount() + \
                       newInv.responseFAPCount() + \
                       newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                  "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/" \
                  "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                  staCount, locCount, chaCount, newInv.dataloggerCount(),
                  decCount, newInv.sensorCount(), resCount, extCount,
                  objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                      net.code(), sta.code(), loc.code(),
                                      cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(req,
                                    http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
        sr = ''
    try:
        end = stream.end().toString(df)
    except ValueError:
        end = ''

    line = lineFmt % (net.code(), sta.code(), loc.code(), stream.code(),
                      lat, lon, elev, depth, azi, dip, desc, scale,
                      scaleFreq, scaleUnit, sr,
                      stream.start().toString(df), end)
    req.write(line)
    lineCount += 1
    byteCount += len(line)

# Return 204 if no matching inventory was found
if lineCount == 0:
    req.write(HTTP.renderErrorPage(req, http.NO_CONTENT,
              "no matching inventory found", ro))
    return False

Logging.notice("%s: returned %i lines (total bytes: %i)" % (
               ro.service, lineCount, byteCount))
utils.accessLog(req, ro, http.OK, byteCount, None)
return True

#---------------------------------------------------------------------------
# Checks if at least one location and channel combination matches the
# request options
@staticmethod
def _matchStation(sta, ro):
    # No filter: return true immediately
def _processRequest(self, req, ro, exp):
    if req._disconnected:
        return False

    maxObj = Application.Instance()._queryObjects
    staCount, chaCount, locCount, objCount = 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    inv = Application.Instance()._inv
    newInv = DataModel.Inventory()
    filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)

    # iterate over inventory networks
    for net in utils.networkIter(inv, ro):
        if not utils.checkObjects(req, objCount, maxObj):
            return False
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in utils.stationIter(net, ro):
            if ro.includeCha:
                numCha, numLoc = self._processStation(newNet, sta, ro)
                if numCha > 0:
                    chaCount += numCha
                    locCount += numLoc
                    objCount += numCha + numLoc
                    if not utils.checkObjects(req, objCount, maxObj):
                        return False
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station is
                    # sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 404 if no matching station was found
    if newInv.networkCount() == 0:
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NOT_FOUND,
                      "No stations found"))
        return False

    # Copy references (if object limit allows to do so)
    if ro.includeCha:
        objCount += self._chaLevelCount
        if ro.includeRes:
            objCount += self._resLevelCount
    if not utils.checkObjects(req, objCount, maxObj):
        return False
    self._copyReferences(newInv, inv, ro)

    req.setHeader("Content-Type", "text/xml")
    if not exp.write(utils.Sink(req), newInv):
        return False

    Logging.notice("WS-Station: Returned %i networks, %i stations and %i " \
                   "streams (total objects: %i)" % (newInv.networkCount(),
                   staCount, chaCount, objCount))
    return True
"%s|%s|%s|%s|%s|%s\n" % ( net.code(), sta.code(), loc.code(), stream.code(), lat, lon, elev, depth, azi, dip, desc, scale, scaleFreq, scaleUnit, sr, start, end))) # sort lines and append to final data string lines.sort(key = lambda line: line[0]) for line in lines: data += line[1] # Return 204 if no matching inventory was found if len(lines) == 0: msg = "no matching inventory found" data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: utils.writeTS(req, data) return False utils.writeTS(req, data) Logging.debug("%s: returned %i lines (total bytes: %i)" % ( ro.service, len(lines), len(data))) utils.accessLog(req, ro, http.OK, len(data), None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod