def input(self):
    fastsdsPrefix = 'fastsds://'

    if self.__url.startswith(fastsdsPrefix):
        fastsds = SDS(self.__url[len(fastsdsPrefix):])
    else:
        fastsds = None

    for (net, sta, loc, cha, startt, endt, restricted, archNet) in self.__tw:
        if not archNet:
            archNet = net

        size = 0

        if fastsds:
            # read raw miniSEED directly from the fast SDS archive
            start = dateutil.parser.parse(startt.iso()).replace(tzinfo=None)
            end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

            for data in fastsds.getRawBytes(start, end, archNet, sta, loc,
                                            cha, self.__bufferSize):
                size += len(data)

                if archNet == net:
                    yield data
                else:
                    try:
                        yield self.__override_network(data, net)
                    except Exception as e:
                        Logging.error(
                            "could not override network code: %s" % str(e))

        else:
            # fall back to the generic RecordStream interface
            rs = RecordStream.Open(self.__url)
            if rs is None:
                Logging.error("could not open record stream")
                break

            rs.addStream(archNet, sta, loc, cha, startt, endt)
            rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
            eof = False

            while not eof:
                data = b""

                # collect records until the output buffer size is reached
                while len(data) < self.__bufferSize:
                    try:
                        rec = rsInput.next()
                    except Exception as e:
                        Logging.error("%s" % str(e))
                        eof = True
                        break

                    if rec is None:
                        eof = True
                        break

                    data += rec.raw().str()

                if data:
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception as e:
                            Logging.error("could not override network "
                                          "code: %s" % str(e))

        # report the result of this time window to all request trackers
        for tracker in self.__trackerList:
            net_class = 't' if net[0] in "0123456789XYZ" else 'p'

            if size == 0:
                tracker.line_status(startt, endt, net, sta, cha, loc,
                                    restricted, net_class, True, [],
                                    "fdsnws", "NODATA", 0, "")
            else:
                tracker.line_status(startt, endt, net, sta, cha, loc,
                                    restricted, net_class, True, [],
                                    "fdsnws", "OK", size, "")

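# The private helper __override_network() called above is not part of this
# excerpt. The sketch below only illustrates how such a rewrite could look for
# miniSEED 2.x data, where the network code occupies bytes 18-19 of each fixed
# data header. The 512-byte record size is an assumption for the sketch; a
# real implementation may determine the record length from blockette 1000.
def override_network_sketch(data, net, recordSize=512):
    """Return a copy of a raw miniSEED buffer with the network code replaced."""
    if len(net) > 2:
        raise ValueError("network code '%s' does not fit into 2 bytes" % net)

    netBytes = net.ljust(2).encode('ascii')
    buf = bytearray(data)

    # patch the 2-byte network field (header offset 18) of every record
    for pos in range(0, len(buf), recordSize):
        buf[pos + 18:pos + 20] = netBytes

    return bytes(buf)
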
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    # optional request tracking in the database
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # skip restricted channels the user is not
                        # authorized to access
                        if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(
                                     self.__user, net.code(), sta.code(),
                                     loc.code(), cha.code(),
                                     s.time.start, s.time.end))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                                net.code(), sta.code(),
                                                cha.code(), loc.code(),
                                                False, "", True, [],
                                                "fdsnws", "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(
        _WaveformProducer(req, ro, rs, fileName, tracker), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET

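# _WaveformProducer itself is not shown in this excerpt. registerProducer(...,
# False) registers it as a Twisted pull producer: the transport calls
# resumeProducing() whenever it can accept more data, so large waveform
# responses are written chunk by chunk instead of being buffered in memory.
# The class below is a minimal, hypothetical sketch of that pattern; its name
# and chunk handling are assumptions, not the actual implementation.
from zope.interface import implementer
from twisted.internet import interfaces


@implementer(interfaces.IPullProducer)
class ChunkProducer(object):

    def __init__(self, req, chunks):
        self.req = req        # twisted.web request object
        self.chunks = chunks  # iterator yielding raw data chunks (bytes)

    def resumeProducing(self):
        # called by the transport whenever it is ready for the next chunk
        try:
            self.req.write(next(self.chunks))
        except StopIteration:
            self.req.unregisterProducer()
            self.req.finish()

    def stopProducing(self):
        # called if the client disconnects before all data was sent
        self.chunks = iter(())
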
def input(self):
    fastsdsPrefix = 'fastsds://'

    if self.__url.startswith(fastsdsPrefix):
        fastsds = SDS(self.__url[len(fastsdsPrefix):])
    else:
        fastsds = None

    for (net, sta, loc, cha, startt, endt, restricted, archNet) in self.__tw:
        if not archNet:
            archNet = net

        size = 0

        if fastsds:
            start = dateutil.parser.parse(startt.iso()).replace(tzinfo=None)
            end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

            for data in fastsds.getRawBytes(start, end, archNet, sta, loc,
                                            cha, self.__bufferSize):
                size += len(data)

                if archNet == net:
                    yield data
                else:
                    try:
                        yield self.__override_network(data, net)
                    except Exception, e:
                        Logging.error(
                            "could not override network code: %s" % str(e))

        else:
            rs = RecordStream.Open(self.__url)
            if rs is None:
                Logging.error("could not open record stream")
                break

            rs.addStream(archNet, sta, loc, cha, startt, endt)
            rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
            eof = False

            while not eof:
                data = ""

                while len(data) < self.__bufferSize:
                    try:
                        rec = rsInput.next()
                    except Exception, e:
                        Logging.error("%s" % str(e))
                        eof = True
                        break

                    if rec is None:
                        eof = True
                        break

                    data += rec.raw().str()

                if data:
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception, e:
                            Logging.error(
                                "could not override network code: %s" % str(e))

def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    # Add request streams
    # iterate over inventory networks
    inv = Application.Instance()._inv
    for s in ro.streams:
        for net in self._networkIter(inv, s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET