def _processStation(newNet, net, sta, ro, skipRestricted):
    chaCount = 0
    dataloggers, sensors = set(), set()
    newSta = DataModel.Station(sta)

    # Copy comments
    for i in xrange(sta.commentCount()):
        newSta.add(DataModel.Comment(sta.comment(i)))

    for loc in ro.locationIter(net, sta, True):
        newLoc = DataModel.SensorLocation(loc)
        # Copy comments
        for i in xrange(loc.commentCount()):
            newLoc.add(DataModel.Comment(loc.comment(i)))

        for stream in ro.streamIter(net, sta, loc, True):
            if skipRestricted and utils.isRestricted(stream):
                continue
            newCha = DataModel.Stream(stream)
            # Copy comments
            for i in xrange(stream.commentCount()):
                newCha.add(DataModel.Comment(stream.comment(i)))
            newLoc.add(newCha)
            dataloggers.add(stream.datalogger())
            sensors.add(stream.sensor())

        if newLoc.streamCount() > 0:
            newSta.add(newLoc)
            chaCount += newLoc.streamCount()

    if newSta.sensorLocationCount() > 0:
        newNet.add(newSta)
        return chaCount, newSta.sensorLocationCount(), dataloggers, sensors

    return 0, 0, [], []
def _processStation(newNet, net, sta, ro, dac, skipRestricted):
    chaCount = 0
    dataloggers, sensors, extents = set(), set(), set()
    newSta = DataModel.Station(sta)
    includeAvailability = dac is not None and ro.availability

    # Copy comments
    for i in xrange(sta.commentCount()):
        newSta.add(DataModel.Comment(sta.comment(i)))

    for loc in ro.locationIter(net, sta, True):
        newLoc = DataModel.SensorLocation(loc)
        # Copy comments
        for i in xrange(loc.commentCount()):
            newLoc.add(DataModel.Comment(loc.comment(i)))

        for stream in ro.streamIter(net, sta, loc, True, dac):
            if skipRestricted and utils.isRestricted(stream):
                continue
            newCha = DataModel.Stream(stream)
            # Copy comments
            for i in xrange(stream.commentCount()):
                newCha.add(DataModel.Comment(stream.comment(i)))
            newLoc.add(newCha)
            dataloggers.add(stream.datalogger())
            sensors.add(stream.sensor())

            if includeAvailability:
                ext = dac.extent(net.code(), sta.code(), loc.code(),
                                 stream.code())
                if ext is not None:
                    extents.add(ext)

        if newLoc.streamCount() > 0:
            newSta.add(newLoc)
            chaCount += newLoc.streamCount()

    if newSta.sensorLocationCount() > 0:
        newNet.add(newSta)
        return chaCount, newSta.sensorLocationCount(), dataloggers, \
            sensors, extents

    return 0, 0, [], [], []
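# Usage sketch (not part of the original module): how the five-tuple
# returned by the dac-aware _processStation above is consumed by its
# caller in _processRequestExp further below. All names mirror that
# caller; nothing is assumed beyond the plain bool skipRestricted.
#
#   numCha, numLoc, d, s, e = _processStation(newNet, net, sta, ro, dac,
#                                             skipRestricted)
#   if numCha > 0:
#       dataloggers |= d    # set union: collect referenced dataloggers
#       sensors |= s        # ... referenced sensors
#       extents |= e        # ... matching data availability extents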
def _processRequestText(self, req, ro, dac):
    if req._disconnected:
        return False

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)

    data = ""
    lines = []

    # level = network
    if not ro.includeSta:
        data = "#Network|Description|StartTime|EndTime|TotalStations\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, True):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # at least one matching station is required
            stationFound = False
            for sta in ro.stationIter(net, False):
                if req._disconnected:
                    return False
                if self._matchStation(net, sta, ro, dac) and \
                   not (skipRestricted and utils.isRestricted(sta)):
                    stationFound = True
                    break
            if not stationFound:
                continue

            start, end = self._formatEpoch(net)
            lines.append(("%s %s" % (net.code(), start),
                          "%s|%s|%s|%s|%i\n" % (
                              net.code(), net.description(), start, end,
                              net.stationCount())))

    # level = station
    elif not ro.includeCha:
        data = "#Network|Station|Latitude|Longitude|Elevation|" \
               "SiteName|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # iterate over inventory stations
            for sta in ro.stationIter(net, True):
                if req._disconnected:
                    return False
                if not self._matchStation(net, sta, ro, dac) or \
                   (skipRestricted and utils.isRestricted(sta)):
                    continue

                try:
                    lat = str(sta.latitude())
                except ValueError:
                    lat = ''
                try:
                    lon = str(sta.longitude())
                except ValueError:
                    lon = ''
                try:
                    elev = str(sta.elevation())
                except ValueError:
                    elev = ''
                try:
                    desc = sta.description()
                except ValueError:
                    desc = ''

                start, end = self._formatEpoch(sta)
                lines.append(("%s.%s %s" % (net.code(), sta.code(), start),
                              "%s|%s|%s|%s|%s|%s|%s|%s\n" % (
                                  net.code(), sta.code(), lat, lon, elev,
                                  desc, start, end)))

    # level = channel (response level not supported in text format)
    else:
        data = "#Network|Station|Location|Channel|Latitude|Longitude|" \
               "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|" \
               "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # iterate over inventory stations, locations, streams
            for sta in ro.stationIter(net, False):
                if req._disconnected:
                    return False
                if skipRestricted and utils.isRestricted(sta):
                    continue
                for loc in ro.locationIter(net, sta, True):
                    for stream in ro.streamIter(net, sta, loc, True, dac):
                        if skipRestricted and utils.isRestricted(stream):
                            continue

                        try:
                            lat = str(loc.latitude())
                        except ValueError:
                            lat = ''
                        try:
                            lon = str(loc.longitude())
                        except ValueError:
                            lon = ''
                        try:
                            elev = str(loc.elevation())
                        except ValueError:
                            elev = ''
                        try:
                            depth = str(stream.depth())
                        except ValueError:
                            depth = ''
                        try:
                            azi = str(stream.azimuth())
                        except ValueError:
                            azi = ''
                        try:
                            dip = str(stream.dip())
                        except ValueError:
                            dip = ''

                        desc = ''
                        try:
                            sensor = self._inv.findSensor(stream.sensor())
                            if sensor is not None:
                                desc = sensor.description()
                        except ValueError:
                            pass

                        try:
                            scale = str(stream.gain())
                        except ValueError:
                            scale = ''
                        try:
                            scaleFreq = str(stream.gainFrequency())
                        except ValueError:
                            scaleFreq = ''
                        try:
                            scaleUnit = str(stream.gainUnit())
                        except ValueError:
                            scaleUnit = ''
                        try:
                            sr = str(stream.sampleRateNumerator() /
                                     float(stream.sampleRateDenominator()))
                        except (ValueError, ZeroDivisionError):
                            sr = ''

                        start, end = self._formatEpoch(stream)
                        lines.append(("%s.%s.%s.%s %s" % (
                            net.code(), sta.code(), loc.code(),
                            stream.code(), start),
                            "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|"
                            "%s|%s|%s|%s|%s|%s\n" % (
                                net.code(), sta.code(), loc.code(),
                                stream.code(), lat, lon, elev, depth,
                                azi, dip, desc, scale, scaleFreq,
                                scaleUnit, sr, start, end)))
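# Hypothetical helper (not present in the original code) condensing the
# try/except pattern repeated above for each optional attribute; `getter`
# is any bound accessor, such as loc.latitude or stream.depth, that raises
# ValueError when the value is unset.
def _valueOrEmpty(getter):
    try:
        return str(getter())
    except ValueError:
        return ''

# e.g.:  lat = _valueOrEmpty(loc.latitude)
#        depth = _valueOrEmpty(stream.depth)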
def _processRequestExp(self, req, ro, exp, dac):
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not self.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    objCount += numLoc + numCha + extCount
                    if not self.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                newInv.responseFIRCount() + \
                newInv.responsePolynomialCount() + \
                newInv.responseFAPCount() + \
                newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/"
        "bytes: %i/%i) " % (ro.service, newInv.networkCount(), staCount,
                            locCount, chaCount, newInv.dataloggerCount(),
                            decCount, newInv.sensorCount(), resCount,
                            extCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        if utils.isRestricted(cha) and \
                           (self.__user is None or
                            not self.__access.authorize(
                                self.__user, net.code(), sta.code(),
                                loc.code(), cha.code(),
                                s.time.start, s.time.end)):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                                net.code(), sta.code(),
                                                cha.code(), loc.code(),
                                                False, "", True, [],
                                                "fdsnws", "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
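# Note on the request id built above: 1420070400000 is the Unix epoch of
# 2015-01-01T00:00:00Z in milliseconds, so reqid encodes the milliseconds
# elapsed since the start of 2015, prefixed with 'ws':
#
#   reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))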
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
        if xff:
            userIP = xff[0].split(",")[0].strip()
        else:
            userIP = req.getClientIP()

        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", userIP, req.getClientIP())
    else:
        tracker = None

    # Open record stream
    rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

    forbidden = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        try:
                            start_time = max(cha.start(), s.time.start)
                        except Exception:
                            start_time = s.time.start

                        try:
                            end_time = min(cha.end(), s.time.end)
                        except Exception:
                            end_time = s.time.end

                        if utils.isRestricted(cha) and \
                           (not self.__user or (self.__access and
                            not self.__access.authorize(
                                self.__user, net.code(), sta.code(),
                                loc.code(), cha.code(),
                                start_time, end_time))):
                            if tracker:
                                net_class = 't' if net.code()[0] \
                                    in "0123456789XYZ" else 'p'
                                tracker.line_status(
                                    start_time, end_time, net.code(),
                                    sta.code(), cha.code(), loc.code(),
                                    True, net_class, True, [],
                                    "fdsnws", "DENIED", 0, "")
                            forbidden = forbidden or (forbidden is None)
                            continue

                        forbidden = False

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueError:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (end_time - start_time).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time.iso(),
                                         end_time.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time,
                                     utils.isRestricted(cha),
                                     sta.archiveNetworkCode())

    if forbidden:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "access denied"
        return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)

    elif forbidden is None:
        if tracker:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")

        msg = "no metadata found"
        return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    prod = _WaveformProducer(req, ro, rs, fileName, tracker)
    req.registerProducer(prod, True)
    prod.resumeProducing()

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
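# Worked example of the sample-budget check above, with assumed figures
# (none of these values come from the original code or its configuration):
samplesM = 500                    # assumed app._samplesM setting
maxSamples = samplesM * 1000000   # budget: 500,000,000 samples
n, d = 100, 1                     # a 100 Hz channel (numerator/denominator)
diffSec = 86400.0                 # one day of data requested
samples = int(diffSec * n / d)    # 8,640,000 samples for this stream
# once the running total exceeds maxSamples, the request is rejected
# with HTTP 413 (REQUEST_ENTITY_TOO_LARGE)
assert samples <= maxSamples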
def _processRequestExp(self, req, ro, exp, dac):
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    objCount += numLoc + numCha + extCount
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                newInv.responseFIRCount() + \
                newInv.responsePolynomialCount() + \
                newInv.responseFAPCount() + \
                newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, "
                  "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/"
                  "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                                      staCount, locCount, chaCount,
                                      newInv.dataloggerCount(), decCount,
                                      newInv.sensorCount(), resCount,
                                      extCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    app = Application.Instance()
    if app._trackdbEnabled:
        userid = ro.userName or app._trackdbDefaultUser
        reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
        tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                   "WAVEFORM", userid,
                                   "REQUEST WAVEFORM " + reqid,
                                   "fdsnws", req.getClientIP(),
                                   req.getClientIP())
    else:
        tracker = None

    # Add request streams
    # iterate over inventory networks
    for s in ro.streams:
        for net in self._networkIter(s):
            for sta in self._stationIter(net, s):
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        if utils.isRestricted(cha) and \
                           (not self.__user or (self.__access and
                            not self.__access.authorize(
                                self.__user, net.code(), sta.code(),
                                loc.code(), cha.code(),
                                s.time.start, s.time.end))):
                            continue

                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

                        if tracker:
                            tracker.line_status(s.time.start, s.time.end,
                                                net.code(), sta.code(),
                                                cha.code(), loc.code(),
                                                False, "", True, [],
                                                "fdsnws", "OK", 0, "")

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix.replace(
        "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker),
                         False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _processRequestText(self, req, ro):
    if req._disconnected:
        return False

    lineCount = 0

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    df = "%FT%T"
    data = ""

    # level = network
    if not ro.includeSta:
        data = "#Network|Description|StartTime|EndTime|TotalStations\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, True):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # at least one matching station is required
            stationFound = False
            for sta in ro.stationIter(net, False):
                if self._matchStation(sta, ro):
                    stationFound = True
                    break
            if not stationFound:
                continue

            try:
                end = net.end().toString(df)
            except ValueException:
                end = ""

            data += "%s|%s|%s|%s|%i\n" % (
                net.code(), net.description(), net.start().toString(df),
                end, net.stationCount())
            lineCount += 1

    # level = station
    elif not ro.includeCha:
        data = "#Network|Station|Latitude|Longitude|Elevation|" \
               "SiteName|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # iterate over inventory stations
            for sta in ro.stationIter(net, True):
                if not self._matchStation(sta, ro):
                    continue

                try:
                    lat = str(sta.latitude())
                except ValueException:
                    lat = ""
                try:
                    lon = str(sta.longitude())
                except ValueException:
                    lon = ""
                try:
                    elev = str(sta.elevation())
                except ValueException:
                    elev = ""
                try:
                    desc = sta.description()
                except ValueException:
                    desc = ""
                try:
                    end = sta.end().toString(df)
                except ValueException:
                    end = ""

                data += "%s|%s|%s|%s|%s|%s|%s|%s\n" % (
                    net.code(), sta.code(), lat, lon, elev, desc,
                    sta.start().toString(df), end)
                lineCount += 1

    # level = channel|response
    else:
        data = "#Network|Station|Location|Channel|Latitude|Longitude|" \
               "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|" \
               "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime\n"

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, False):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue

            # iterate over inventory stations, locations, streams
            for sta in ro.stationIter(net, False):
                for loc in ro.locationIter(sta, True):
                    for stream in ro.streamIter(loc, True):
                        if skipRestricted and utils.isRestricted(stream):
                            continue

                        try:
                            lat = str(loc.latitude())
                        except ValueException:
                            lat = ""
                        try:
                            lon = str(loc.longitude())
                        except ValueException:
                            lon = ""
                        try:
                            elev = str(loc.elevation())
                        except ValueException:
                            elev = ""
                        try:
                            depth = str(stream.depth())
                        except ValueException:
                            depth = ""
                        try:
                            azi = str(stream.azimuth())
                        except ValueException:
                            azi = ""
                        try:
                            dip = str(stream.dip())
                        except ValueException:
                            dip = ""

                        desc = ""
                        try:
                            sensor = self._inv.findSensor(stream.sensor())
                            if sensor is not None:
                                desc = sensor.description()
                        except ValueException:
                            pass

                        try:
                            scale = str(stream.gain())
                        except ValueException:
                            scale = ""
                        try:
                            scaleFreq = str(stream.gainFrequency())
                        except ValueException:
                            scaleFreq = ""
                        try:
                            scaleUnit = str(stream.gainUnit())
                        except ValueException:
                            scaleUnit = ""
                        try:
                            sr = str(stream.sampleRateNumerator() /
                                     float(stream.sampleRateDenominator()))
                        except (ValueException, ZeroDivisionError):
                            sr = ""
                        try:
                            end = stream.end().toString(df)
                        except ValueException:
                            end = ""

                        data += "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|" \
                                "%s|%s|%s|%s\n" % (
                                    net.code(), sta.code(), loc.code(),
                                    stream.code(), lat, lon, elev, depth,
                                    azi, dip, desc, scale, scaleFreq,
                                    scaleUnit, sr,
                                    stream.start().toString(df), end)
                        lineCount += 1
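# Note: the date format df = "%FT%T" used above is strftime shorthand for
# "%Y-%m-%dT%H:%M:%S", i.e. ISO 8601 date and time without fractional
# seconds (e.g. 2015-06-01T12:00:00).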
def _processRequestExp(self, req, ro, exp):
    if req._disconnected:
        return False

    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors = set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = \
                    self._processStation(newNet, sta, ro, skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                newInv.responseFIRCount() + \
                newInv.responsePolynomialCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                newInv.sensorCount()

    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False

    Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, "
                   "%iDL, %iDec, %iSen, %iRes (total objects/bytes: "
                   "%i/%i) " % (ro.service, newInv.networkCount(),
                                staCount, locCount, chaCount,
                                newInv.dataloggerCount(), decCount,
                                newInv.sensorCount(), resCount, objCount,
                                sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro):
    if ro.quality != 'B' and ro.quality != 'M':
        msg = "quality other than 'B' or 'M' not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.minimumLength:
        msg = "enforcing of minimum record length not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    if ro.longestOnly:
        msg = "limitation to longest segment not supported"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    app = Application.Instance()
    ro._checkTimes(app._realtimeGap)

    # Open record stream
    rs = RecordStream.Open(self._rsURL)
    if rs is None:
        msg = "could not open record stream"
        return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    maxSamples = None
    if app._samplesM is not None:
        maxSamples = app._samplesM * 1000000
    samples = 0

    # Add request streams
    # iterate over inventory networks
    inv = Application.Instance()._inv
    for s in ro.streams:
        for net in self._networkIter(inv, s):
            if ro.userName is None and utils.isRestricted(net):
                continue
            for sta in self._stationIter(net, s):
                if ro.userName is None and utils.isRestricted(sta):
                    continue
                for loc in self._locationIter(sta, s):
                    for cha in self._streamIter(loc, s):
                        # enforce maximum sample per request restriction
                        if maxSamples is not None:
                            try:
                                n = cha.sampleRateNumerator()
                                d = cha.sampleRateDenominator()
                            except ValueException:
                                msg = "skipping stream without sampling " \
                                      "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(),
                                          loc.code(), cha.code())
                                Logging.warning(msg)
                                continue

                            # calculate number of samples for requested
                            # time window
                            diffSec = (s.time.end - s.time.start).length()
                            samples += int(diffSec * n / d)
                            if samples > maxSamples:
                                msg = "maximum number of %sM samples " \
                                      "exceeded" % str(app._samplesM)
                                return HTTP.renderErrorPage(
                                    req, http.REQUEST_ENTITY_TOO_LARGE,
                                    msg, ro)

                        Logging.debug("adding stream: %s.%s.%s.%s %s - %s"
                                      % (net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start.iso(),
                                         s.time.end.iso()))
                        rs.addStream(net.code(), sta.code(), loc.code(),
                                     cha.code(), s.time.start, s.time.end)

    # Build output filename
    fileName = Application.Instance()._fileNamePrefix + '.mseed'

    # Create producer for async IO
    req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
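# Sketch of the "%time" filename substitution used by the newer revisions
# above (this revision appends '.mseed' to the bare prefix). The prefix
# value is an assumption for illustration only:
import time
fileNamePrefix = "fdsnws_%time"   # assumed configured prefix
fileName = fileNamePrefix.replace(
    "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'
# e.g. "fdsnws_2015-06-01T12:00:00.mseed"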