def createCommandLineDescription(self):
    """Register the "Parameters" command line option group.

    Adds the --coord (lat,lon,dep) and --time string options used to
    describe the origin to create.
    """
    try:
        self.commandline().addGroup("Parameters")
        self.commandline().addStringOption(
            "Parameters", "coord", "lat,lon,dep of origin")
        self.commandline().addStringOption(
            "Parameters", "time", "time of origin")
    except Exception:
        # was a bare 'except:', which would also swallow SystemExit and
        # KeyboardInterrupt; narrow to Exception, keep the best-effort log
        Logging.warning("caught unexpected error %s" % sys.exc_info())
def renderErrorPage(request, code, msg, ro=None): resp = """\ Error %i: %s %s Usage details are available from %s Request: %s Request Submitted: %s Service Version: %s """ # rewrite response code if requested and no data was found if ro is not None and code == http.NO_CONTENT: code = ro.noData request.setHeader('Content-Type', 'text/plain') request.setResponseCode(code) reference = "%s/" % (request.path.rpartition('/')[0]) codeStr = http.RESPONSES[code] Logging.warning("responding with error: %i (%s)" % (code, codeStr)) date = Core.Time.GMT().toString("%FT%T.%f") response = resp % (code, codeStr, msg, reference, request.uri, date, VERSION) utils.accessLog(request, ro, code, len(response), msg) return response
def render_GET(self, req):
    """Handle an HTTP GET event request.

    Invalid query parameters result in an HTTP 400 response carrying
    the parser's error message.
    """
    # Parse and validate GET parameters
    ro = _EventRequestOptions(req.args)
    try:
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # NOTE(review): excerpt ends here; the success path continues outside
    # this chunk.
def render_POST(self, req):
    """Handle an HTTP POST station request.

    The POST body is parsed first, then the combined options are
    validated; any ValueError maps to an HTTP 400 response.
    """
    # Parse and validate POST parameters
    ro = _StationRequestOptions()
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # NOTE(review): excerpt ends here; the success path continues outside
    # this chunk.
def render_POST(self, request):
    """Handle an HTTP POST token request: decrypt the submitted token.

    An OSError from the gpg wrapper (e.g. the binary could not be
    executed) is reported as HTTP 500.
    """
    request.setHeader('Content-Type', 'text/plain')
    try:
        verified = self.__gpg.decrypt(request.content.getvalue())
    except OSError, e:
        msg = "gpg decrypt error"
        Logging.warning("%s: %s" % (msg, str(e)))
        return HTTP.renderErrorPage(request, http.INTERNAL_SERVER_ERROR,
                                    msg, None)
    # NOTE(review): excerpt ends here; token verification continues
    # outside this chunk.
def render_GET(self, req):
    """Handle an HTTP GET station request.

    Invalid query parameters result in an HTTP 400 response.
    """
    # Parse and validate GET parameters
    ro = _StationRequestOptions(req.args)
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # NOTE(review): excerpt ends here; the success path continues outside
    # this chunk.
def render_POST(self, request):
    """Handle an HTTP POST token request: decrypt the submitted token.

    Any decryption failure is treated as an invalid token and reported
    as HTTP 400.
    """
    request.setHeader('Content-Type', 'text/plain')
    try:
        verified = self.__gpg.decrypt(request.content.getvalue())
    except Exception, e:
        msg = "invalid token"
        Logging.warning("%s: %s" % (msg, str(e)))
        return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None)
    # NOTE(review): excerpt ends here; token verification continues
    # outside this chunk.
def render_POST(self, req):
    """Handle an HTTP POST dataselect request.

    Records the authenticated user's mail address (if any) on the
    request options, then parses body and query parameters; any
    ValueError maps to an HTTP 400 response.
    """
    # Parse and validate POST parameters
    ro = _DataSelectRequestOptions()
    # self.__user is None for anonymous access; 'and' short-circuits then
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # NOTE(review): excerpt ends here; the success path continues outside
    # this chunk.
def resumeProducing(self):
    """Pull records from the record stream until a buffer of at least
    self.bufSize bytes is collected, the stream ends, or an error occurs.
    """
    rec = None
    data = ""
    while len(data) < self.bufSize:
        try:
            rec = self.rsInput.next()
            if rec:
                data += rec.raw().str()
            else:
                # end of stream
                break
        except Exception, e:
            # treat any read error as end of data; it is only logged
            Logging.warning("%s" % str(e))
            break
    # NOTE(review): excerpt ends here; delivery of 'data' continues
    # outside this chunk.
def render_GET(self, req):
    """Handle an HTTP GET dataselect request.

    Invalid query parameters result in an HTTP 400 response.
    """
    # Parse and validate GET parameters (original comment wrongly said POST)
    ro = _DataSelectRequestOptions(req.args)
    # self.__user is None for anonymous access; 'and' short-circuits then
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # NOTE(review): excerpt ends here; the success path continues outside
    # this chunk.
def render_GET(self, req): # No archive no service if not os.path.isdir(self._sdsPath): msg = "SDS archive not found: %s" % self._sdsPath return HTTP.renderErrorPage(request, http.SERVICE_UNAVAILABLE, msg) # Parse and validate GET parameters try: ro = _DataSelectRequestOptions(req.args) ro.parse() except ValueError, e: Logging.warning(str(e)) return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e))
def resumeProducing(self):
    """Deliver the next miniSEED record to the Twisted request.

    First call (self.written == 0): if no record could be read at all,
    answer with a "no data" error page; otherwise emit the miniSEED
    response headers.  A missing record on a later call means end of
    stream: finish the request and write the access log.
    """
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        # read errors end the stream; only logged
        Logging.warning("%s" % str(e))

    if self.written == 0:
        # read first record to test if any data exists at all
        if not rec:
            msg = "no waveform data found"
            data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                        self.ro)
            if data:
                self.req.write(data)
            self.req.unregisterProducer()
            self.req.finish()
            return

        self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
        self.req.setHeader('Content-Disposition', "attachment; " \
                           "filename=%s" % self.fileName)

    if not rec:
        # end of stream: report size, log access, close the request
        self.req.unregisterProducer()
        Logging.debug("%s: returned %i bytes of mseed data" % (
            self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        self.req.finish()
        return

    # forward the raw record bytes and account for them
    data = rec.raw().str()
    self.req.write(data)
    self.written += len(data)
def renderErrorPage(request, code, msg):
    """Build an HTML error document for *request*.

    Sets the response code and Content-Type, logs the error, and
    returns the rendered HTML body.
    """
    template = """\
<html>
<head><title>%i - %s</title></head>
<body>
<h1>%s</h1>
<p>%s</p>
</body>
</html>"""
    request.setHeader("Content-Type", "text/html")
    request.setResponseCode(code)
    statusText = http.RESPONSES[code]
    Logging.warning("Responding with error: %i (%s)" % (code, statusText))
    return template % (code, statusText, statusText, msg)
def resumeProducing(self):
    """Deliver the next miniSEED record to the Twisted request.

    On the first call: if no record could be read, respond with an HTML
    404 page; otherwise emit the miniSEED response headers.  A missing
    record on a later call means end of stream: finish the request.
    """
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        Logging.warning("%s" % str(e))  # e.g. ArchiveException

    if not self.initialized:
        self.initialized = True
        # read first record to test if any data exists at all
        if not rec:
            msg = "No waveform data found"
            self.req.write(HTTP.renderErrorPage(self.req, http.NOT_FOUND,
                                                msg))
            self.req.unregisterProducer()
            self.req.finish()
            return

        self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
        self.req.setHeader("Content-Disposition", "attachment; filename=%s" % \
                           self.fileName)

    if not rec:
        # end of stream
        self.req.unregisterProducer()
        self.req.finish()
        return

    self.req.write(rec.raw().str())
def _addStream(self, ro, streams, toks, lastFileName):
    """Append one (net, sta, start, end) stream epoch to *streams*.

    The epoch start is parsed from tokens 4/5 (year.doy), the epoch end
    from the last file name; both are clipped to the requested time
    window.  Epochs entirely outside the window are dropped, unparsable
    stream information is logged and ignored.
    """
    epochStart = Time()
    epochEnd = Time()
    parsed = epochStart.fromString("%s.%s" % (toks[4], toks[5]), "%Y.%j") \
        and epochEnd.fromString(lastFileName[-8:] + "23:59:59", "%Y.%j%T")

    if not parsed:
        Logging.warning("invalid stream information: %s%s.%s" % (
            toks[0], toks[1], toks[2]))
        return

    # match time: skip epochs not intersecting the requested window
    if ro.time.start > epochEnd or \
            (ro.time.end and ro.time.end < epochStart):
        return

    # limit time to requested time
    if ro.time.start > epochStart:
        epochStart = ro.time.start
    if ro.time.end and ro.time.end < epochEnd:
        epochEnd = ro.time.end

    streams.append((toks[1], toks[2], epochStart, epochEnd))
def send(self, *args):
    """Send *args* over the messaging connection.

    Retries once per second until the connection reports success.
    """
    while True:
        if self.connection().send(*args):
            return
        Logging.warning("send failed, retrying")
        time.sleep(1)
return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) try: attributes = json.loads(verified.data) td = dateutil.parser.parse(attributes['valid_until']) - \ datetime.datetime.now(dateutil.tz.tzutc()) lifetime = td.seconds + td.days * 24 * 3600 except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) if lifetime <= 0: msg = "token is expired" Logging.warning(msg) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) userid = base64.urlsafe_b64encode(hashlib.sha256(verified.data).digest()[:18]) password = self.__userdb.addUser(userid, attributes, time.time() + min(lifetime, 24 * 3600), verified.data) utils.accessLog(request, None, http.OK, len(userid)+len(password)+1, None) return '%s:%s' % (userid, password) ################################################################################ class Site(server.Site): #--------------------------------------------------------------------------- def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(),
def _processRequest(self, req, ro):  #pylint: disable=W0212
    """Process a validated dataselect request.

    Rejects unsupported options, optionally records the request in the
    tracker database and/or request log, resolves the requested streams
    against the inventory (enforcing access restrictions and the
    configured sample limit), and hands the record stream to an
    asynchronous waveform producer.  Returns an error page string or
    twisted's NOT_DONE_YET.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    trackerList = []

    if app._trackdbEnabled or app._requestLog:
        # prefer the first X-Forwarded-For hop as the user IP when the
        # service runs behind a proxy
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        clientID = req.getHeader("User-Agent")
        if clientID:
            clientID = clientID[:80]
        else:
            clientID = "fdsnws"

    if app._trackdbEnabled:
        if ro.userName:
            userID = ro.userName
        else:
            userID = app._trackdbDefaultUser

        # request ID: milliseconds since 2015-01-01 epoch
        reqID = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB(clientID, app.connection(), reqID,
                                   "WAVEFORM", userID,
                                   "REQUEST WAVEFORM " + reqID,
                                   "fdsnws", userIP, req.getClientIP())
        trackerList.append(tracker)

    if app._requestLog:
        tracker = app._requestLog.tracker(ro.service, ro.userName, userIP,
                                          clientID)
        trackerList.append(tracker)

    # Open record stream
    rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

    # forbidden stays None while nothing was requested, becomes True when
    # only denied streams were seen, and False once any stream is granted
    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            netRestricted = utils.isRestricted(net)
            if not trackerList and netRestricted and not self.__user:
                forbidden = forbidden or (forbidden is None)
                continue
            for sta in self._stationIter(net, s):
                staRestricted = utils.isRestricted(sta)
                if not trackerList and staRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip the requested window to the channel epoch;
                        # an open channel end raises ValueError
                        start_time = max(cha.start(), s.time.start)
                        try:
                            end_time = min(cha.end(), s.time.end)
                        except ValueError:
                            end_time = s.time.end
                        if (netRestricted or staRestricted or
                                utils.isRestricted(cha)) and (
                                not self.__user or (self.__access and
                                not self.__access.authorize(
                                    self.__user, net.code(), sta.code(),
                                    loc.code(), cha.code(),
                                    start_time, end_time))):
                            # record the denial per tracker
                            for tracker in trackerList:
                                net_class = 't' if net.code()[0] \
                                    in "0123456789XYZ" else 'p'
                                tracker.line_status(
                                    start_time, end_time, net.code(),
                                    sta.code(), cha.code(), loc.code(),
                                    True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return self.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug(
                            "adding stream: %s.%s.%s.%s %s - %s" % (
                                net.code(), sta.code(), loc.code(),
                                cha.code(), start_time.iso(),
                                end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        # every matched stream was denied
        for tracker in trackerList:
            tracker.volume_status("fdsnws", "DENIED", 0, "")
            tracker.request_status("END", "")
        msg = "access denied"
        return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

    if forbidden is None:
        # nothing matched at all
        for tracker in trackerList:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
        msg = "no metadata found"
        return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    """Process a validated dataselect request.

    Rejects unsupported options, opens a record stream, optionally
    records the request in the tracker database, resolves the requested
    streams against the inventory (honoring access restrictions and the
    configured sample limit) and hands the stream to an asynchronous
    waveform producer.  Returns an error page string or twisted's
    NOT_DONE_YET.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # (removed a redundant second 'app = Application.Instance()' here)
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request ID: milliseconds since 2015-01-01 epoch
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # restricted channels require an authenticated and
                        # authorized user
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(
                                     self.__user, net.code(), sta.code(),
                                     loc.code(), cha.code(),
                                     s.time.start, s.time.end))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            # BUG FIX: was 'except ValueException:' —
                            # an undefined name; the sibling handlers
                            # catch ValueError here
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                                net.code(), sta.code(),
                                                cha.code(), loc.code(),
                                                False, "", True, [],
                                                "fdsnws", "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    """Process a validated dataselect request (anonymous-access variant).

    Rejects unsupported options, opens a record stream, skips restricted
    networks/stations for anonymous users, enforces the configured
    sample limit and hands the stream to an asynchronous waveform
    producer.  Returns an error page string or twisted's NOT_DONE_YET.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            # anonymous users may not access restricted networks/stations
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            # BUG FIX: was 'except ValueException:' —
                            # an undefined name; the sibling handlers
                            # catch ValueError here
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
try: verified = self.__gpg.decrypt(request.content.getvalue()) except OSError, e: msg = "gpg decrypt error" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg, None) except Exception, e: msg = "invalid token" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY: msg = "token has invalid signature" Logging.warning(msg) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) try: attributes = json.loads(verified.data) td = dateutil.parser.parse(attributes['valid_until']) - \ datetime.datetime.now(dateutil.tz.tzutc()) lifetime = td.seconds + td.days * 24 * 3600 except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) if lifetime <= 0: msg = "token is expired"
def _processRequest(self, req, ro):
    """Process a validated dataselect request.

    Rejects unsupported options, opens a record stream, optionally
    records the request in the tracker database, resolves the requested
    streams (honoring access restrictions and the configured sample
    limit) and hands the stream to an asynchronous waveform producer.
    Returns an error page string or twisted's NOT_DONE_YET.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # (removed a redundant second 'app = Application.Instance()' here)
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request ID: milliseconds since 2015-01-01 epoch
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # BUG FIX: guard self.__access before calling
                        # authorize() — previously this raised an
                        # AttributeError when access control was not
                        # configured (sibling handler already guards it)
                        if utils.isRestricted(cha) and \
                                (self.__user is None or (self.__access and
                                 not self.__access.authorize(
                                     self.__user, net.code(), sta.code(),
                                     loc.code(), cha.code(),
                                     s.time.start, s.time.end))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            # BUG FIX: was 'except ValueException:' —
                            # an undefined name; the sibling handlers
                            # catch ValueError here
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                                net.code(), sta.code(),
                                                cha.code(), loc.code(),
                                                False, "", True, [],
                                                "fdsnws", "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    """Process a validated dataselect request (single-tracker variant).

    Rejects unsupported options, optionally records the request in the
    tracker database, resolves the requested streams against the
    inventory (enforcing access restrictions and the configured sample
    limit) and hands the record stream to an asynchronous waveform
    producer.  Returns an error page string or twisted's NOT_DONE_YET.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        # request ID: milliseconds since 2015-01-01 epoch
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        # prefer the first X-Forwarded-For hop as the user IP when the
        # service runs behind a proxy
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid, "fdsnws",
                                   userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    # forbidden stays None while nothing was requested, becomes True when
    # only denied streams were seen, and False once any stream is granted
    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip the requested window to the channel epoch;
                        # unset epoch boundaries raise
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start
                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(
                                     self.__user, net.code(), sta.code(),
                                     loc.code(), cha.code(),
                                     start_time, end_time))):
                            if tracker:
                                net_class = 't' if net.code()[0] in \
                                    "0123456789XYZ" else 'p'
                                tracker.line_status(
                                    start_time, end_time, net.code(),
                                    sta.code(), cha.code(), loc.code(),
                                    True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        if tracker:
            # NOTE(review): sibling handler reports "DENIED" here —
            # confirm whether "NODATA" is intended for the denied case
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
        msg = "access denied"
        return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)
    elif forbidden is None:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
        msg = "no metadata found"
        return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
# unknown key else: Logging.warning('Line %i: ignoring unknown parameter: %s' % (iLine, key)) except ValueError, ve: Logging.warning('Line %i: can not parse %s value' % (iLine, key)) except Exception: Logging.error('Line %i: %s' % (iLine, str(traceback.format_exc()))) return None # check if not latFound: Logging.warning('could not add origin, missing latitude parameter') elif not lonFound: Logging.warning( 'could not add origin, missing longitude parameter') elif not origin.time().value().valid(): Logging.warning( 'could not add origin, missing origin time parameter') else: if magnitudeMB is not None: origin.add(magnitudeMB) if magnitudeML is not None: origin.add(magnitudeML) if magnitudeMS is not None: origin.add(magnitudeMS) if magnitudeBB is not None: origin.add(magnitudeBB)
def _processRequestText(self, req, ro, dbq, ep):
    """Stream the events in *ep* as FDSNWS text (pipe separated) output.

    For each event the preferred origin and magnitude are loaded from
    the database query object *dbq*.  Returns False if the client
    disconnected mid-stream, True otherwise.
    """
    lineCount = 0

    # header line of the text format
    line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
           "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
           "EventLocationName|EventType\n"
    df = "%FT%T.%f"
    utils.writeTS(req, line)
    byteCount = len(line)

    # add related information
    for iEvent in range(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # query for preferred origin
        obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                            e.preferredOriginID())
        o = DataModel.Origin.Cast(obj)
        if o is None:
            Logging.warning("preferred origin of event '%s' not found: "
                            "%s" % (eID, e.preferredOriginID()))
            continue

        # depth (optional attribute: unset raises ValueError)
        try:
            depth = str(o.depth().value())
        except ValueError:
            depth = ''

        # author (suppressed entirely when author hiding is configured)
        if self._hideAuthor:
            author = ''
        else:
            try:
                author = o.creationInfo().author()
            except ValueError:
                author = ''

        # contributor
        try:
            contrib = e.creationInfo().agencyID()
        except ValueError:
            contrib = ''

        # query for preferred magnitude (if any)
        mType, mVal, mAuthor = '', '', ''
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                mType = m.type()
                mVal = str(m.magnitude().value())
                if self._hideAuthor:
                    mAuthor = ''
                else:
                    try:
                        mAuthor = m.creationInfo().author()
                    except ValueError:
                        pass

        # event description: use the first region-name description
        dbq.loadEventDescriptions(e)
        region = ''
        for i in range(e.eventDescriptionCount()):
            ed = e.eventDescription(i)
            if ed.type() == DataModel.REGION_NAME:
                region = ed.text()
                break

        # event type
        try:
            eType = DataModel.QMLTypeMapper.EventTypeToString(e.type())
        except ValueError:
            eType = ''

        # abort early if the client went away
        if req._disconnected:
            return False
        line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s|%s\n" % (
            eID, o.time().value().toString(df), o.latitude().value(),
            o.longitude().value(), depth, author, contrib, eID, mType,
            mVal, mAuthor, region, eType)
        utils.writeTS(req, line)
        lineCount += 1
        byteCount += len(line)

    # write response
    Logging.debug("%s: returned %i events (total bytes: %i) " % (
        ro.service, lineCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def loadStreams(self):
    """Populate self.streams: station code -> {component letter: stream id}.

    Preferred path: use the configuration module's detecStream/detecLocid
    bindings and resolve the three components via the inventory.  If no
    configuration module is available, fall back to scanning the whole
    inventory and taking the first stream set matching the current epoch.
    """
    now = Core.Time.GMT()
    inv = Client.Inventory.Instance()
    self.streams = {}

    # try to load streams by detecLocid and detecStream
    mod = self.configModule()
    if mod is not None and mod.configStationCount() > 0:
        Logging.info('loading streams using detecLocid and detecStream')
        for i in range(mod.configStationCount()):
            cfg = mod.configStation(i)
            net = cfg.networkCode()
            sta = cfg.stationCode()
            if self.streams.has_key(sta):
                Logging.warning('ambiguous stream id found for station '
                                '%s.%s' % (net, sta))
                continue

            setup = DataModel.findSetup(cfg, self.name(), True)
            if not setup:
                Logging.warning('could not find station setup for %s.%s' % (
                    net, sta))
                continue

            params = DataModel.ParameterSet.Find(setup.parameterSetID())
            if not params:
                Logging.warning('could not find station parameters for '
                                '%s.%s' % (net, sta))
                continue

            # scan the binding parameters for the detection stream/locid
            detecLocid = ''
            detecStream = None
            for j in xrange(params.parameterCount()):
                param = params.parameter(j)
                if param.name() == 'detecStream':
                    detecStream = param.value()
                elif param.name() == 'detecLocid':
                    detecLocid = param.value()
            if detecStream is None:
                Logging.warning('could not find detecStream for %s.%s' % (
                    net, sta))
                continue

            loc = inv.getSensorLocation(net, sta, detecLocid, now)
            if loc is None:
                Logging.warning('could not find preferred location for '
                                '%s.%s' % (net, sta))
                continue

            # collect the Z and the two horizontal components, keyed by
            # the last letter of the channel code
            components = {}
            tc = DataModel.ThreeComponents()
            DataModel.getThreeComponents(tc, loc, detecStream[:2], now)
            if tc.vertical():
                cha = tc.vertical()
                wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                   cha.code(), '')
                components[cha.code()[-1]] = wfsID
                Logging.debug('add stream %s (vertical)' % wfs2Str(wfsID))
            if tc.firstHorizontal():
                cha = tc.firstHorizontal()
                wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                   cha.code(), '')
                components[cha.code()[-1]] = wfsID
                Logging.debug('add stream %s (first horizontal)' %
                              wfs2Str(wfsID))
            if tc.secondHorizontal():
                cha = tc.secondHorizontal()
                wfsID = DataModel.WaveformStreamID(net, sta, loc.code(),
                                                   cha.code(), '')
                components[cha.code()[-1]] = wfsID
                Logging.debug('add stream %s (second horizontal)' %
                              wfs2Str(wfsID))
            if len(components) > 0:
                self.streams[sta] = components

        return

    # fallback loading streams from inventory
    Logging.warning('no configuration module available, loading streams '
                    'from inventory and selecting first available stream '
                    'matching epoch')
    for iNet in xrange(inv.inventory().networkCount()):
        net = inv.inventory().network(iNet)
        Logging.debug('network %s: loaded %i stations' % (
            net.code(), net.stationCount()))
        for iSta in xrange(net.stationCount()):
            sta = net.station(iSta)

            # station epoch must include 'now'
            # NOTE(review): the bare 'except:' presumably guards unset
            # optional start/end attributes — confirm the raised type
            try:
                start = sta.start()
                if not start <= now:
                    continue
            except:
                continue

            try:
                end = sta.end()
                if not now <= end:
                    continue
            except:
                # an unset end time means the epoch is still open
                pass

            for iLoc in xrange(sta.sensorLocationCount()):
                loc = sta.sensorLocation(iLoc)
                for iCha in range(loc.streamCount()):
                    cha = loc.stream(iCha)

                    wfsID = DataModel.WaveformStreamID(
                        net.code(), sta.code(), loc.code(), cha.code(), '')
                    comp = cha.code()[2]
                    if not self.streams.has_key(sta.code()):
                        components = {}
                        components[comp] = wfsID
                        self.streams[sta.code()] = components
                    else:
                        # Seismic Handler does not support network,
                        # location and channel code: make sure network and
                        # location codes match first item in station
                        # specific stream list
                        oldWfsID = self.streams[sta.code()].values()[0]
                        if net.code() != oldWfsID.networkCode() or \
                                loc.code() != oldWfsID.locationCode() or \
                                cha.code()[:2] != oldWfsID.channelCode()[:2]:
                            Logging.warning('ambiguous stream id found '
                                            'for station %s, ignoring %s'
                                            % (sta.code(), wfs2Str(wfsID)))
                            continue
                        self.streams[sta.code()][comp] = wfsID
                        Logging.debug('add stream %s' % wfs2Str(wfsID))
def sh2proc(self, file):
    """Convert a Seismic Handler protocol file into SeisComP3 objects.

    Reads *file* line by line as 'key: value' pairs, accumulating pick,
    arrival, amplitude and station-magnitude data until an
    '--- end of phase ---' marker flushes the collected phase block into
    the EventParameters/Origin objects. Network magnitudes (mb, ML,
    Ms(BB), mB) are created lazily on first contribution.

    Returns None on an unexpected parse error; per-line value errors are
    logged and skipped. NOTE(review): the successful-completion return
    value is not visible in this chunk — presumably the assembled
    EventParameters; confirm against the full file.
    """
    ep = DataModel.EventParameters()
    origin = DataModel.Origin.Create()
    event = DataModel.Event.Create()

    origin.setCreationInfo(DataModel.CreationInfo())
    origin.creationInfo().setCreationTime(Core.Time.GMT())

    originQuality = None
    originCE = None

    latFound = False
    lonFound = False
    depthError = None
    originComments = {}

    # variables, reset after 'end of phase'
    pick = None
    stationMag = None
    staCode = None
    compCode = None
    stationMagBB = None

    amplitudeDisp = None
    amplitudeVel = None
    amplitudeSNR = None
    amplitudeBB = None

    # network magnitudes, created on first station contribution
    magnitudeMB = None
    magnitudeML = None
    magnitudeMS = None
    magnitudeBB = None

    # conversion factor from kilometers to degrees of arc
    km2degFac = 1.0 / Math.deg2km(1.0)

    # read file line by line, split key and value at colon
    iLine = 0
    for line in file:
        iLine += 1
        a = line.split(':', 1)
        key = a[0].strip()
        keyLower = key.lower()
        value = None

        # empty line
        if len(keyLower) == 0:
            continue

        # end of phase: flush the collected phase block
        elif keyLower == '--- end of phase ---':
            if pick is None:
                Logging.warning('Line %i: found empty phase block' % iLine)
                continue

            if staCode is None or compCode is None:
                Logging.warning('Line %i: end of phase, stream code '
                                'incomplete' % iLine)
                continue

            if not self.streams.has_key(staCode):
                Logging.warning('Line %i: end of phase, station code %s '
                                'not found in inventory' % (iLine, staCode))
                continue

            if not self.streams[staCode].has_key(compCode):
                Logging.warning('Line %i: end of phase, component %s of '
                                'station %s not found in inventory'
                                % (iLine, compCode, staCode))
                continue

            streamID = self.streams[staCode][compCode]

            pick.setWaveformID(streamID)
            ep.add(pick)

            arrival.setPickID(pick.publicID())
            arrival.setPhase(phase)
            origin.add(arrival)

            if amplitudeSNR is not None:
                amplitudeSNR.setPickID(pick.publicID())
                amplitudeSNR.setWaveformID(streamID)
                ep.add(amplitudeSNR)

            if amplitudeBB is not None:
                amplitudeBB.setPickID(pick.publicID())
                amplitudeBB.setWaveformID(streamID)
                ep.add(amplitudeBB)

            if stationMagBB is not None:
                stationMagBB.setWaveformID(streamID)
                origin.add(stationMagBB)
                stationMagContrib = DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMagBB.publicID())
                if magnitudeBB is None:
                    magnitudeBB = DataModel.Magnitude.Create()
                magnitudeBB.add(stationMagContrib)

            if stationMag is not None:
                # attach the matching amplitude (displacement for mb/ML,
                # velocity for Ms(BB)) before adding the station magnitude
                if stationMag.type() in ['mb', 'ML'] and \
                        amplitudeDisp is not None:
                    amplitudeDisp.setPickID(pick.publicID())
                    amplitudeDisp.setWaveformID(streamID)
                    amplitudeDisp.setPeriod(
                        DataModel.RealQuantity(ampPeriod))
                    amplitudeDisp.setType(stationMag.type())
                    ep.add(amplitudeDisp)

                if stationMag.type() in ['Ms(BB)'] and \
                        amplitudeVel is not None:
                    amplitudeVel.setPickID(pick.publicID())
                    amplitudeVel.setWaveformID(streamID)
                    amplitudeVel.setPeriod(
                        DataModel.RealQuantity(ampPeriod))
                    amplitudeVel.setType(stationMag.type())
                    ep.add(amplitudeVel)

                stationMag.setWaveformID(streamID)
                origin.add(stationMag)

                stationMagContrib = DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMag.publicID())

                magType = stationMag.type()
                if magType == 'ML':
                    if magnitudeML is None:
                        magnitudeML = DataModel.Magnitude.Create()
                    magnitudeML.add(stationMagContrib)

                elif magType == 'Ms(BB)':
                    if magnitudeMS is None:
                        magnitudeMS = DataModel.Magnitude.Create()
                    magnitudeMS.add(stationMagContrib)

                elif magType == 'mb':
                    if magnitudeMB is None:
                        magnitudeMB = DataModel.Magnitude.Create()
                    magnitudeMB.add(stationMagContrib)

            # reset phase-scoped state for the next block
            pick = None
            staCode = None
            compCode = None
            stationMag = None
            stationMagBB = None
            amplitudeDisp = None
            amplitudeVel = None
            amplitudeSNR = None
            amplitudeBB = None
            continue

        # empty key
        elif len(a) == 1:
            Logging.warning('Line %i: key without value' % iLine)
            continue

        value = a[1].strip()

        # first key/value pair of a new phase block: create fresh objects
        if pick is None:
            pick = DataModel.Pick.Create()
            arrival = DataModel.Arrival()

        try:
            ##############################################################
            # station parameters

            # station code
            if keyLower == 'station code':
                staCode = value

            # pick time
            elif keyLower == 'onset time':
                pick.setTime(DataModel.TimeQuantity(self.parseTime(value)))

            # pick onset type
            elif keyLower == 'onset type':
                found = False
                for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                              DataModel.QUESTIONABLE]:
                    if value == DataModel.EPickOnsetNames_name(onset):
                        pick.setOnset(onset)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported onset value')

            # phase code
            elif keyLower == 'phase name':
                phase = DataModel.Phase()
                phase.setCode(value)
                pick.setPhaseHint(phase)

            # event type
            elif keyLower == 'event type':
                evttype = EventTypes[value]
                event.setType(evttype)
                originComments[key] = value

            # filter ID
            elif keyLower == 'applied filter':
                pick.setFilterID(value)

            # channel code, prepended by configured Channel prefix if only
            # one character is found
            elif keyLower == 'component':
                compCode = value

            # pick evaluation mode
            elif keyLower == 'pick type':
                found = False
                for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                    if value == DataModel.EEvaluationModeNames_name(mode):
                        pick.setEvaluationMode(mode)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported evaluation mode value')

            # pick author
            elif keyLower == 'analyst':
                creationInfo = DataModel.CreationInfo()
                creationInfo.setAuthor(value)
                pick.setCreationInfo(creationInfo)

            # pick polarity
            # isn't tested
            elif keyLower == 'sign':
                if value == 'positive':
                    sign = '0'  # positive
                elif value == 'negative':
                    sign = '1'  # negative
                else:
                    sign = '2'  # unknown
                pick.setPolarity(float(sign))

            # arrival weight
            elif keyLower == 'weight':
                arrival.setWeight(float(value))

            # arrival azimuth
            elif keyLower == 'theo. azimuth (deg)':
                arrival.setAzimuth(float(value))

            # pick theo backazimuth; 'corrected' values take precedence
            elif keyLower == 'theo. backazimuth (deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s'
                                  % (iLine, key))
                else:
                    pick.setBackazimuth(
                        DataModel.RealQuantity(float(value)))
                    pick.setSlownessMethodID('theoretical')

            # pick beam slowness
            elif keyLower == 'beam-slowness (sec/deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s'
                                  % (iLine, key))
                else:
                    pick.setHorizontalSlowness(
                        DataModel.RealQuantity(float(value)))
                    pick.setSlownessMethodID('Array Beam')

            # pick beam backazimuth
            elif keyLower == 'beam-azimuth (deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s'
                                  % (iLine, key))
                else:
                    pick.setBackazimuth(
                        DataModel.RealQuantity(float(value)))

            # pick epi slowness (overrides theoretical/beam values)
            elif keyLower == 'epi-slowness (sec/deg)':
                pick.setHorizontalSlowness(
                    DataModel.RealQuantity(float(value)))
                pick.setSlownessMethodID('corrected')

            # pick epi backazimuth
            elif keyLower == 'epi-azimuth (deg)':
                pick.setBackazimuth(DataModel.RealQuantity(float(value)))

            # arrival distance degree
            elif keyLower == 'distance (deg)':
                arrival.setDistance(float(value))

            # arrival distance km, recalculates for degree
            elif keyLower == 'distance (km)':
                if isinstance(arrival.distance(), float):
                    Logging.debug('Line %i: ignoring parameter: %s'
                                  % (iLine - 1, 'distance (deg)'))
                arrival.setDistance(float(value) * km2degFac)

            # arrival time residual
            elif keyLower == 'residual time':
                arrival.setTimeResidual(float(value))

            # amplitude snr
            elif keyLower == 'signal/noise':
                amplitudeSNR = DataModel.Amplitude.Create()
                amplitudeSNR.setType('SNR')
                amplitudeSNR.setAmplitude(
                    DataModel.RealQuantity(float(value)))

            # amplitude period
            elif keyLower.startswith('period'):
                ampPeriod = float(value)

            # amplitude value for displacement
            elif keyLower == 'amplitude (nm)':
                amplitudeDisp = DataModel.Amplitude.Create()
                amplitudeDisp.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeDisp.setUnit('nm')

            # amplitude value for velocity
            elif keyLower.startswith('vel. amplitude'):
                amplitudeVel = DataModel.Amplitude.Create()
                amplitudeVel.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeVel.setUnit('nm/s')

            # broadband amplitude; assumes 'bb period (sec)' was already
            # parsed (ampBBPeriod would be unbound otherwise)
            elif keyLower == 'bb amplitude (nm/sec)':
                amplitudeBB = DataModel.Amplitude.Create()
                amplitudeBB.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeBB.setType('mB')
                amplitudeBB.setUnit('nm/s')
                amplitudeBB.setPeriod(DataModel.RealQuantity(ampBBPeriod))

            elif keyLower == 'bb period (sec)':
                ampBBPeriod = float(value)

            # broadband station magnitude; requires a preceding
            # 'bb amplitude' entry
            elif keyLower == 'broadband magnitude':
                magType = self.parseMagType('bb')
                stationMagBB = DataModel.StationMagnitude.Create()
                stationMagBB.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                stationMagBB.setType(magType)
                stationMagBB.setAmplitudeID(amplitudeBB.publicID())

            # ignored
            elif keyLower == 'quality number':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # station magnitude value and type
            elif keyLower.startswith('magnitude '):
                magType = self.parseMagType(key[10:])
                stationMag = DataModel.StationMagnitude.Create()
                stationMag.setMagnitude(
                    DataModel.RealQuantity(float(value)))

                if len(magType) > 0:
                    stationMag.setType(magType)

                if magType == 'mb':
                    stationMag.setAmplitudeID(amplitudeDisp.publicID())

                elif magType == 'MS(BB)':
                    stationMag.setAmplitudeID(amplitudeVel.publicID())
                else:
                    Logging.debug('Line %i: Magnitude Type not known %s.'
                                  % (iLine, magType))

            ###############################################################
            # origin parameters

            # event ID, added as origin comment later on
            elif keyLower == 'event id':
                originComments[key] = value

            # magnitude value and type
            elif keyLower == 'mean bb magnitude':
                magType = self.parseMagType('bb')
                if magnitudeBB is None:
                    magnitudeBB = DataModel.Magnitude.Create()
                magnitudeBB.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                magnitudeBB.setType(magType)

            elif keyLower.startswith('mean magnitude '):
                magType = self.parseMagType(key[15:])

                if magType == 'ML':
                    if magnitudeML is None:
                        magnitudeML = DataModel.Magnitude.Create()
                    magnitudeML.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeML.setType(magType)

                elif magType == 'Ms(BB)':
                    if magnitudeMS is None:
                        magnitudeMS = DataModel.Magnitude.Create()
                    magnitudeMS.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeMS.setType(magType)

                elif magType == 'mb':
                    if magnitudeMB is None:
                        magnitudeMB = DataModel.Magnitude.Create()
                    magnitudeMB.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeMB.setType(magType)

                else:
                    Logging.warning(
                        'Line %i: Magnitude type %s not defined yet.'
                        % (iLine, magType))

            # latitude
            elif keyLower == 'latitude':
                origin.latitude().setValue(float(value))
                latFound = True
            elif keyLower == 'error in latitude (km)':
                origin.latitude().setUncertainty(float(value))

            # longitude
            elif keyLower == 'longitude':
                origin.longitude().setValue(float(value))
                lonFound = True
            elif keyLower == 'error in longitude (km)':
                origin.longitude().setUncertainty(float(value))

            # depth
            elif keyLower == 'depth (km)':
                origin.setDepth(DataModel.RealQuantity(float(value)))
                if depthError is not None:
                    origin.depth().setUncertainty(depthError)
            elif keyLower == 'depth type':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'error in depth (km)':
                depthError = float(value)
                # depth may not have been parsed yet; retried above when
                # 'depth (km)' arrives
                try:
                    origin.depth().setUncertainty(depthError)
                except Core.ValueException:
                    pass

            # time
            elif keyLower == 'origin time':
                origin.time().setValue(self.parseTime(value))
            elif keyLower == 'error in origin time':
                origin.time().setUncertainty(float(value))

            # location method
            elif keyLower == 'location method':
                origin.setMethodID(str(value))

            # region table, added as origin comment later on
            elif keyLower == 'region table':
                originComments[key] = value

            # region table, added as origin comment later on
            elif keyLower == 'region id':
                originComments[key] = value

            # source region, added as origin comment later on
            elif keyLower == 'source region':
                originComments[key] = value

            # used station count
            elif keyLower == 'no. of stations used':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setUsedStationCount(int(value))

            # ignored
            elif keyLower == 'reference location name':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # confidence ellipsoid major axis
            elif keyLower == 'error ellipse major':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMajorAxisLength(float(value))

            # confidence ellipsoid minor axis
            elif keyLower == 'error ellipse minor':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMinorAxisLength(float(value))

            # confidence ellipsoid rotation
            elif keyLower == 'error ellipse strike':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setMajorAxisRotation(float(value))

            # azimuthal gap
            elif keyLower == 'max azimuthal gap (deg)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setAzimuthalGap(float(value))

            # creation info author
            elif keyLower == 'author':
                origin.creationInfo().setAuthor(value)

            # creation info agency
            elif keyLower == 'source of information':
                origin.creationInfo().setAgencyID(value)

            # earth model id
            elif keyLower == 'velocity model':
                origin.setEarthModelID(value)

            # standard error
            elif keyLower == 'rms of residuals (sec)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setStandardError(float(value))

            # ignored
            elif keyLower == 'phase flags':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # ignored
            elif keyLower == 'location input params':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # missing keys
            elif keyLower == 'ampl&period source':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'location quality':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'reference latitude':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'reference longitude':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower.startswith('amplitude time'):
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # unknown key
            else:
                Logging.warning('Line %i: ignoring unknown parameter: %s'
                                % (iLine, key))

        except ValueError, ve:
            Logging.warning('Line %i: can not parse %s value'
                            % (iLine, key))
        except Exception:
            Logging.error('Line %i: %s' % (iLine,
                          str(traceback.format_exc())))
            return None
def _processRequest(self, req, ro):
    """Handle a validated fdsnws dataselect request.

    Rejects unsupported request options, optionally registers the request
    in the tracking database, enforces the configured maximum-sample
    limit, adds all matching (and authorized) streams to the record
    stream, and hands the response over to an asynchronous producer.

    Parameters:
        req -- Twisted HTTP request object
        ro  -- parsed request options (streams, time window, quality, ...)

    Returns server.NOT_DONE_YET on success (response is produced
    asynchronously) or a rendered error page string on failure.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # FIX: the original fetched Application.Instance() a second time here;
    # 'app' is already bound above, so the redundant call was removed.
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        # prefer the client IP reported by a proxy, if present
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # clip the requested window to the channel epoch;
                        # unset epoch bounds raise, hence the fallbacks
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start

                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        # skip restricted streams the user may not access
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(
                                     self.__user, net.code(), sta.code(),
                                     loc.code(), cha.code(),
                                     start_time, end_time))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug(
                            "adding stream: %s.%s.%s.%s %s - %s"
                            % (net.code(), sta.code(), loc.code(),
                               cha.code(), start_time.iso(),
                               end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) try: attributes = json.loads(verified.data) td = dateutil.parser.parse(attributes['valid_until']) - \ datetime.datetime.now(dateutil.tz.tzutc()) lifetime = td.seconds + td.days * 24 * 3600 except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) if lifetime <= 0: msg = "token is expired" Logging.warning(msg) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) userid = base64.urlsafe_b64encode( hashlib.sha256(verified.data).digest()[:18]) password = self.__userdb.addUser( userid, attributes, time.time() + min(lifetime, 24 * 3600), verified.data) utils.accessLog(request, None, http.OK, len(userid) + len(password) + 1, None) return '%s:%s' % (userid, password) ################################################################################ class Site(server.Site):
def _processRequest(self, req, ro):
    """Handle a validated waveform request (non-tracking variant).

    Rejects unsupported request options, opens a record stream, enforces
    the configured maximum-sample limit, adds all matching unrestricted
    streams from the inventory and delegates the response to an
    asynchronous waveform producer.

    Parameters:
        req -- Twisted HTTP request object
        ro  -- parsed request options (streams, time window, quality, ...)

    Returns server.NOT_DONE_YET on success or a rendered error page
    string on failure.
    """
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
        samples = 0

    # Add request streams
    # iterate over inventory networks
    inv = Application.Instance()._inv
    for s in ro.streams:
        for net in self._networkIter(inv, s):
            # anonymous users may not access restricted networks/stations
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug(
                            "adding stream: %s.%s.%s.%s %s - %s"
                            % (net.code(), sta.code(), loc.code(),
                               cha.code(), s.time.start.iso(),
                               s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def sh2proc(self, file):
    """Convert a Seismic Handler protocol file into SeisComP3 objects
    (single network magnitude variant).

    Reads *file* line by line as 'key: value' pairs, accumulating one
    pick, arrival, amplitude and optional station magnitude per phase
    block; an '--- end of phase ---' marker flushes the block into the
    EventParameters/Origin objects. All station magnitude contributions
    go into a single network magnitude.

    Returns None on an unexpected parse error; per-line value errors are
    logged and skipped. NOTE(review): the successful-completion return
    value is not visible in this chunk — presumably the assembled
    EventParameters; confirm against the full file.
    """
    ep = DataModel.EventParameters()
    magnitude = DataModel.Magnitude.Create()
    origin = DataModel.Origin.Create()

    origin.setCreationInfo(DataModel.CreationInfo())
    origin.creationInfo().setCreationTime(Core.Time.GMT())

    originQuality = None
    originCE = None

    latFound = False
    lonFound = False
    depthError = None
    originComments = {}

    # phase variables, reset after 'end of phase'
    pick = None
    stationMag = None
    staCode = None
    compCode = None

    # read file line by line, split key and value at colon
    iLine = 0
    for line in file:
        iLine += 1
        a = line.split(':', 1)
        key = a[0].strip()
        keyLower = key.lower()
        value = None

        # empty line
        if len(keyLower) == 0:
            continue

        # end of phase: flush the collected phase block
        elif keyLower == '--- end of phase ---':
            if pick is None:
                Logging.warning('Line %i: found empty phase block' % iLine)
                continue

            if staCode is None or compCode is None:
                Logging.warning('Line %i: end of phase, stream code ' \
                                'incomplete' % iLine)
                continue

            if not self.streams.has_key(staCode):
                Logging.warning('Line %i: end of phase, station code %s ' \
                                'not found in inventory' % (iLine, staCode))
                continue

            if not self.streams[staCode].has_key(compCode):
                Logging.warning('Line %i: end of phase, component %s of ' \
                                'station %s not found in inventory' % (
                                    iLine, compCode, staCode))
                continue

            streamID = self.streams[staCode][compCode]

            pick.setWaveformID(streamID)
            ep.add(pick)

            arrival.setPickID(pick.publicID())
            origin.add(arrival)

            amplitude.setPickID(pick.publicID())
            ep.add(amplitude)

            if stationMag is not None:
                stationMag.setWaveformID(streamID)
                origin.add(stationMag)

                stationMagContrib = DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMag.publicID())
                magnitude.add(stationMagContrib)

            # reset phase-scoped state for the next block
            pick = None
            staCode = None
            compCode = None
            stationMag = None
            continue

        # empty key
        elif len(a) == 1:
            Logging.warning('Line %i: key without value' % iLine)
            continue

        value = a[1].strip()

        # first key/value pair of a new phase block: create fresh objects
        if pick is None:
            pick = DataModel.Pick.Create()
            arrival = DataModel.Arrival()
            amplitude = DataModel.Amplitude.Create()

        try:
            ##############################################################
            # station parameters

            # station code
            if keyLower == 'station code':
                staCode = value

            # pick time
            elif keyLower == 'onset time':
                pick.setTime(DataModel.TimeQuantity(self.parseTime(value)))

            # pick onset type
            elif keyLower == 'onset type':
                found = False
                for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                              DataModel.QUESTIONABLE]:
                    if value == DataModel.EPickOnsetNames_name(onset):
                        pick.setOnset(onset)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported onset value')

            # phase code
            elif keyLower == 'phase name':
                phase = DataModel.Phase()
                phase.setCode(value)
                pick.setPhaseHint(phase)
                arrival.setPhase(phase)

            # event type, added as origin comment later on
            elif keyLower == 'event type':
                originComments[key] = value

            # filter ID
            elif keyLower == 'applied filter':
                pick.setFilterID(value)

            # channel code, prepended by configured Channel prefix if only
            # one character is found
            elif keyLower == 'component':
                compCode = value

            # pick evaluation mode
            elif keyLower == 'pick type':
                found = False
                for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                    if value == DataModel.EEvaluationModeNames_name(mode):
                        pick.setEvaluationMode(mode)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported evaluation mode value')

            # arrival weight
            elif keyLower == 'weight':
                arrival.setWeight(float(value))

            # arrival azimuth
            elif keyLower == 'theo. azimuth (deg)':
                arrival.setAzimuth(float(value))

            # arrival backazimuth
            elif keyLower == 'theo. backazimuth (deg)':
                pick.setBackazimuth(DataModel.RealQuantity(float(value)))

            # arrival distance
            elif keyLower == 'distance (deg)':
                arrival.setDistance(float(value))

            # ignored
            elif keyLower == 'distance (km)':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # arrival time residual
            elif keyLower == 'residual time':
                arrival.setTimeResidual(float(value))

            # ignored
            elif keyLower == 'quality number':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # station magnitude value and type
            elif keyLower.startswith('magnitude '):
                stationMag = DataModel.StationMagnitude.Create()
                stationMag.setAmplitudeID(amplitude.publicID())
                stationMag.setMagnitude(
                    DataModel.RealQuantity(float(value)))

                magType = self.parseMagType(key[10:])
                if len(magType) > 0:
                    stationMag.setType(magType)
                    amplitude.setType(magType)

            ###############################################################
            # origin parameters

            # event ID, added as origin comment later on
            elif keyLower == 'event id':
                originComments[key] = value

            # magnitude value and type
            elif keyLower.startswith('mean magnitude '):
                magnitude.setMagnitude(DataModel.RealQuantity(
                    float(value)))

                magType = self.parseMagType(key[15:])
                if len(magType) > 0:
                    magnitude.setType(magType)

            # latitude
            elif keyLower == 'latitude':
                origin.latitude().setValue(float(value))
                latFound = True
            elif keyLower == 'error in latitude (km)':
                origin.latitude().setUncertainty(float(value))

            # longitude
            elif keyLower == 'longitude':
                origin.longitude().setValue(float(value))
                lonFound = True
            elif keyLower == 'error in longitude (km)':
                origin.longitude().setUncertainty(float(value))

            # depth
            elif keyLower == 'depth (km)':
                origin.setDepth(DataModel.RealQuantity(float(value)))
                if depthError is not None:
                    origin.depth().setUncertainty(depthError)
            elif keyLower == 'depth type':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'error in depth (km)':
                depthError = float(value)
                # depth may not have been parsed yet; retried above when
                # 'depth (km)' arrives
                try:
                    origin.depth().setUncertainty(depthError)
                except Core.ValueException:
                    pass

            # time
            elif keyLower == 'origin time':
                origin.time().setValue(self.parseTime(value))
            elif keyLower == 'error in origin time':
                origin.time().setUncertainty(float(value))

            # region table, added as origin comment later on
            elif keyLower == 'region table':
                originComments[key] = value

            # region table, added as origin comment later on
            elif keyLower == 'region id':
                originComments[key] = value

            # source region, added as origin comment later on
            elif keyLower == 'source region':
                originComments[key] = value

            # used station count
            elif keyLower == 'no. of stations used':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setUsedStationCount(int(value))

            # ignored
            elif keyLower == 'reference location name':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # confidence ellipsoid major axis
            elif keyLower == 'error ellipse major':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMajorAxisLength(float(value))

            # confidence ellipsoid minor axis
            elif keyLower == 'error ellipse minor':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMinorAxisLength(float(value))

            # confidence ellipsoid rotation
            elif keyLower == 'error ellipse strike':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setMajorAxisRotation(float(value))

            # azimuthal gap
            elif keyLower == 'max azimuthal gap (deg)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setAzimuthalGap(float(value))

            # creation info author
            elif keyLower == 'author':
                origin.creationInfo().setAuthor(value)

            # creation info agency
            elif keyLower == 'agency':
                origin.creationInfo().setAgencyID(value)

            # earth model id
            elif keyLower == 'velocity model':
                origin.setEarthModelID(value)

            # standard error
            elif keyLower == 'rms of residuals (sec)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setStandardError(float(value))

            # ignored
            elif keyLower == 'phase flags':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # ignored
            elif keyLower == 'location input params':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # unknown key
            else:
                Logging.warning('Line %i: ignoring unknown parameter: %s' \
                                % (iLine, key))

        except ValueError, ve:
            Logging.warning('Line %i: can not parse %s value'
                            % (iLine, key))
        except Exception:
            Logging.error('Line %i: %s' % (iLine,
                          str(traceback.format_exc())))
            return None
def _processRequestText(self, req, ro, dbq, ep): lineCount = 0 line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \ "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \ "EventLocationName\n" df = "%FT%T.%f" utils.writeTS(req, line) byteCount = len(line) # add related information for iEvent in xrange(ep.eventCount()): e = ep.event(iEvent) eID = e.publicID() # query for preferred origin obj = dbq.getObject(DataModel.Origin.TypeInfo(), e.preferredOriginID()) o = DataModel.Origin.Cast(obj) if o is None: Logging.warning("preferred origin of event '%s' not found: %s" % ( eID, e.preferredOriginID())) continue # depth try: depth = str(o.depth().value()) except ValueException: depth = '' # author if self._hideAuthor: author = '' else: try: author = o.creationInfo().author() except ValueException: author = '' # contributor try: contrib = e.creationInfo().agencyID() except ValueException: contrib = '' # query for preferred magnitude (if any) mType, mVal, mAuthor = '', '', '' if e.preferredMagnitudeID(): obj = dbq.getObject(DataModel.Magnitude.TypeInfo(), e.preferredMagnitudeID()) m = DataModel.Magnitude.Cast(obj) if m is not None: mType = m.type() mVal = str(m.magnitude().value()) if self._hideAuthor: mAuthor = '' else: try: mAuthor = m.creationInfo().author() except ValueException: pass # event description dbq.loadEventDescriptions(e) region = '' for i in xrange(e.eventDescriptionCount()): ed = e.eventDescription(i) if ed.type() == DataModel.REGION_NAME: region = ed.text() break if req._disconnected: return False line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s\n" % ( eID, o.time().value().toString(df), o.latitude().value(), o.longitude().value(), depth, author, contrib, eID, mType, mVal, mAuthor, region) utils.writeTS(req, line) lineCount +=1 byteCount += len(line) # write response Logging.debug("%s: returned %i events (total bytes: %i) " % ( ro.service, lineCount, byteCount)) utils.accessLog(req, ro, http.OK, byteCount, None) return True
verified = self.__gpg.decrypt(request.content.getvalue()) except OSError, e: msg = "gpg decrypt error" Logging.warning("%s: %s" % (msg, str(e))) return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg) except Exception, e: msg = "invalid token" Logging.warning("%s: %s" % (msg, str(e))) return self.renderErrorPage(request, http.BAD_REQUEST, msg) if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY: msg = "token has invalid signature" Logging.warning(msg) return self.renderErrorPage(request, http.BAD_REQUEST, msg) try: attributes = json.loads(verified.data) td = dateutil.parser.parse(attributes['valid_until']) - \ datetime.datetime.now(dateutil.tz.tzutc()) lifetime = td.seconds + td.days * 24 * 3600 except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return self.renderErrorPage(request, http.BAD_REQUEST, msg) if lifetime <= 0: msg = "token is expired"