def renderErrorPage(request, code, msg, ro=None):
    """Set the response status on *request* and return a plain-text error
    document.

    request -- Twisted HTTP request being answered
    code    -- HTTP status code to respond with
    msg     -- human-readable error description
    ro      -- request options; if given and *code* is 204 (No Content)
               the status is rewritten to the client-requested ro.noData

    Returns the response body. Fixed: a final 204 status now yields an
    empty body, since HTTP forbids a message body for 204 responses.
    """
    resp = """\
Error %i: %s

%s

Usage details are available from %s

Request:
%s

Request Submitted:
%s

Service Version:
%s
"""

    # rewrite response code if requested and no data was found
    if ro is not None and code == http.NO_CONTENT:
        code = ro.noData

    request.setResponseCode(code)
    codeStr = http.RESPONSES[code]
    Logging.warning("responding with error: %i (%s)" % (code, codeStr))

    # status code 204 requires an empty message body (RFC 7230 §3.3)
    if code == http.NO_CONTENT:
        response = ""
    else:
        request.setHeader('Content-Type', 'text/plain')
        reference = "%s/" % (request.path.rpartition('/')[0])
        date = Core.Time.GMT().toString("%FT%T.%f")
        response = resp % (code, codeStr, msg, reference, request.uri, date,
                           VERSION)

    utils.accessLog(request, ro, code, len(response), msg)
    return response
def _finish(self):
    """Finalize the HTTP response once streaming has ended: emit a 204
    error page when nothing was written, otherwise log the transferred
    volume; then notify all trackers and close the request."""
    if self.stopped:
        return

    noData = self.written == 0
    if noData:
        msg = "no waveform data found"
        errorpage = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg,
                                         VERSION, self.ro)
        if errorpage:
            self.req.write(errorpage)
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)

    # same END notification in both cases; only the status differs
    # (self.written is 0 in the NODATA case)
    status = "NODATA" if noData else "OK"
    for tracker in self.trackerList:
        tracker.volume_status("fdsnws", status, self.written, "")
        tracker.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
def resumeProducing(self): rec = None try: rec = self.rsInput.next() except Exception, e: Logging.warning("%s" % str(e)) if self.written == 0: # read first record to test if any data exists at all if not rec: msg = "no waveform data found" data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro) if data: self.req.write(data) self.req.unregisterProducer() self.req.finish() return self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed') self.req.setHeader('Content-Disposition', "attachment; " \ "filename=%s" % self.fileName) if not rec: self.req.unregisterProducer() Logging.debug("%s: returned %i bytes of mseed data" % ( self.ro.service, self.written)) utils.accessLog(self.req, self.ro, http.OK, self.written, None) self.req.finish() return data = rec.raw().str() self.req.write(data) self.written += len(data)
def stopProducing(self):
    """Abort the transfer: record the partial download in the logs,
    report an ERROR volume status to every tracker and close the
    HTTP response."""
    self.stopped = True

    Logging.debug("%s: returned %i bytes of mseed data (not completed)" %
                  (self.ro.service, self.written))
    utils.accessLog(self.req, self.ro, http.OK, self.written,
                    "not completed")

    for t in self.trackerList:
        t.volume_status("fdsnws", "ERROR", self.written, "")
        t.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
def renderErrorPage(request, code, msg, version=VERSION, ro=None):
    """Build a plain-text error document for *request* and set its
    response status.

    If the incoming code is 204 (No Content) and request options are
    supplied, the status is replaced by the client-requested ro.noData
    value. A final 204 status yields an empty body, as required by HTTP.

    Returns the response body string (possibly empty).
    """
    template = """\
Error %i: %s

%s

Usage details are available from %s

Request:
%s

Request Submitted:
%s

Service Version:
%s
"""

    wasNoContent = (code == http.NO_CONTENT)

    # honor the client-supplied 'nodata' status code
    if wasNoContent and ro is not None:
        code = ro.noData

    request.setResponseCode(code)

    if code == http.NO_CONTENT:
        # status code 204 requires no message body
        body = ""
    else:
        request.setHeader('Content-Type', 'text/plain')
        statusText = http.RESPONSES[code]
        parent = "%s/" % (request.path.rpartition('/')[0])
        now = Core.Time.GMT().toString("%FT%T.%f")
        body = template % (code, statusText, msg, parent, request.uri,
                           now, version)
        # only warn for genuine errors, not for rewritten no-data codes
        if not wasNoContent:
            Logging.warning("responding with error: %i (%s)" %
                            (code, statusText))

    utils.accessLog(request, ro, code, len(body), msg)
    return body
class _WaveformProducer: def __init__(self, req, ro, rs, fileName, tracker): self.req = req self.ro = ro self.rs = rs # keep a reference to avoid crash self.rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW) self.fileName = fileName self.written = 0 self.tracker = tracker def resumeProducing(self): rec = None try: rec = self.rsInput.next() except Exception, e: Logging.warning("%s" % str(e)) if self.written == 0: # read first record to test if any data exists at all if not rec: msg = "no waveform data found" data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro) if data: self.req.write(data) self.req.unregisterProducer() self.req.finish() if self.tracker: self.tracker.volume_status("fdsnws", "NODATA", 0, "") self.tracker.request_status("END", "") return self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed') self.req.setHeader('Content-Disposition', "attachment; " \ "filename=%s" % self.fileName) if not rec: self.req.unregisterProducer() Logging.debug("%s: returned %i bytes of mseed data" % (self.ro.service, self.written)) utils.accessLog(self.req, self.ro, http.OK, self.written, None) self.req.finish() if self.tracker: self.tracker.volume_status("fdsnws", "OK", self.written, "") self.tracker.request_status("END", "") return data = rec.raw().str() self.req.write(data) self.written += len(data)
def _finish(self):
    """Close the HTTP response: render a 204 error page if no data was
    written, otherwise log the transferred volume; notify the tracker
    in both cases."""
    if self.written == 0:
        msg = "no waveform data found"
        # fixed: pass ro by keyword -- passed positionally it binds to an
        # optional 'version' parameter if renderErrorPage declares
        # (request, code, msg, version=..., ro=None); the keyword form is
        # correct under either signature. The returned body is discarded
        # here; renderErrorPage still sets the response code.
        HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, ro=self.ro)

        if self.tracker:
            self.tracker.volume_status("fdsnws", "NODATA", 0, "")
            self.tracker.request_status("END", "")
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)

        if self.tracker:
            self.tracker.volume_status("fdsnws", "OK", self.written, "")
            self.tracker.request_status("END", "")

    self.req.unregisterProducer()
    self.req.finish()
# NOTE(review): chunk fragment -- this text begins in the middle of an
# expression of a token-handling method whose 'def' (and its enclosing
# 'try:') lies outside the visible range, and the trailing
# Site.getResourceFor() appears truncated (no return statement; compare the
# complete variant elsewhere which ends with
# 'return server.Site.getResourceFor(self, request)').
# Left byte-identical; reformat once the full context is available.
datetime.datetime.now(dateutil.tz.tzutc()) lifetime = td.seconds + td.days * 24 * 3600 except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) if lifetime <= 0: msg = "token is expired" Logging.warning(msg) return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None) userid = base64.urlsafe_b64encode(hashlib.sha256(verified.data).digest()[:18]) password = self.__userdb.addUser(userid, attributes, time.time() + min(lifetime, 24 * 3600), verified.data) utils.accessLog(request, None, http.OK, len(userid)+len(password)+1, None) return '%s:%s' % (userid, password) ################################################################################ class Site(server.Site): #--------------------------------------------------------------------------- def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri)) request.setHeader('Server', "SeisComP3-FDSNWS/%s" % VERSION) request.setHeader('Access-Control-Allow-Origin', '*') request.setHeader('Access-Control-Allow-Headers', 'Authorization') request.setHeader('Access-Control-Expose-Headers', 'WWW-Authenticate')
def _processRequest(self, req, ro, dbq, exp):
    """Query matching events from the database, attach their related
    objects (descriptions, comments, origins, magnitudes, arrivals,
    picks) according to the request options *ro*, and stream the result
    through exporter *exp*.

    req -- Twisted HTTP request
    ro  -- event request options
    dbq -- database query interface
    exp -- SeisComP exporter used to serialize the event parameters

    Returns True on success, False if the client disconnected, no events
    matched, an object limit was exceeded or the export failed.
    """
    if req._disconnected:
        return False

    # copies below must not clash with globally registered objects
    DataModel.PublicObject.SetRegistrationEnabled(False)
    maxObj = Application.Instance()._queryObjects

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            event = dbq.getEventByPublicID(eID)
            event = DataModel.Event.Cast(event)
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "No matching events found"
        # NOTE(review): 'ro' is passed as 4th positional argument; if this
        # module's renderErrorPage declares (request, code, msg,
        # version=..., ro=None) it would bind to 'version' -- verify the
        # signature in use here
        utils.writeTS(req,
                      HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False

    objCount = ep.eventCount()
    Logging.debug("events found: %i" % objCount)
    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()

    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += dbq.loadComments(e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
        dbIter.close()
        objCount += e.originReferenceCount()

        # TODO: load FocalMechanismReferences???
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origins
        for iORef in xrange(e.originReferenceCount()):
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue
            ep.add(o)
            objCount += 1

            # comments
            if ro.comments:
                objCount += dbq.loadComments(o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes: either all or the event's preferred one
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
            dbIter.close()
            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += dbq.loadComments(o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)

            # collect pick IDs if requested
            if ro.picks:
                for iArrival in xrange(o.arrivalCount()):
                    pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                ep.add(pick)

    # write response
    if ro.output == "csv":
        req.setHeader("Content-Type", "text/plain")
    else:
        req.setHeader("Content-Type", "application/xml")
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False

    Logging.notice("%s: returned %i events and %i origins (total " \
                   "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                   ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
# NOTE(review): chunk fragment -- begins in the middle of a text-output
# request handler (its 'def' lies outside the visible range) and ends inside
# a truncated _matchStation(); the trailing 'for loc in ...' loop body is
# cut off. Left byte-identical; reformat once the full context is available.
req.write(line) lineCount += 1 byteCount += len(line) # Return 204 if no matching inventory was found if lineCount == 0: msg = "no matching inventory found" data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: req.write(data) return False Logging.notice("%s: returned %i lines (total bytes: %i)" % ( ro.service, lineCount, byteCount)) utils.accessLog(req, ro, http.OK, byteCount, None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod def _matchStation(sta, ro): # No filter: return true immediately if not ro.channel or (not ro.channel.loc and not ro.channel.cha): return True for loc in ro.locationIter(sta, False): if not ro.channel.cha and not ro.time: return True
def _processRequest(self, req, ro, exp):
    """Build a filtered copy of the station inventory according to the
    request options *ro* and stream it through exporter *exp*.

    Returns True on success, False if the client disconnected, nothing
    matched, an object limit was exceeded or the export failed.
    """
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)
    maxObj = Application.Instance()._queryObjects
    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    # NOTE(review): registration is disabled a second time here -- the
    # repeated call is redundant but harmless
    DataModel.PublicObject.SetRegistrationEnabled(False)
    inv = Application.Instance()._inv
    newInv = DataModel.Inventory()
    # NOTE(review): 'filterChannel' is computed but never read below
    filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)
    dataloggers, sensors = set(), set()

    # iterate over inventory networks
    for net in utils.networkIter(inv, ro):
        if not ro.restricted and net.restricted():
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in utils.stationIter(net, ro, matchGeo=True):
            if not ro.restricted and sta.restricted():
                continue
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(newNet, sta, ro)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station is
                    # sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # NOTE(review): adds the cumulative station count rather than
            # newNet.stationCount(); objCount over-counts -- verify
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        # NOTE(review): 'ro' is passed as 4th positional argument; verify
        # it matches this module's renderErrorPage signature
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT,
                      "No matching inventory found", ro))
        return False

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, inv, ro,
                                        dataloggers, sensors)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    req.setHeader("Content-Type", "application/xml")
    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False
    Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                   "%iDL, %iDec, %iSen, %iRes (total objects/bytes: " \
                   "%i/%i) " % (ro.service, newInv.networkCount(), staCount,
                   locCount, chaCount, newInv.dataloggerCount(), decCount,
                   newInv.sensorCount(), resCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestText(self, req, ro, dbq, ep):
    """Stream the events in *ep* as FDSN text format (pipe-separated
    lines), one line per event with its preferred origin and magnitude.

    Returns True on success, False if the client disconnected.
    """
    lineCount = 0

    line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
           "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
           "EventLocationName\n"
    df = "%FT%T.%f"
    utils.writeTS(req, line)
    byteCount = len(line)

    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # query for preferred origin
        obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                            e.preferredOriginID())
        o = DataModel.Origin.Cast(obj)
        if o is None:
            Logging.warning("preferred origin of event '%s' not found: %s" % (
                            eID, e.preferredOriginID()))
            continue

        # depth
        # NOTE(review): 'ValueException' is presumably the SeisComP Core
        # exception for unset optional values (a sibling variant of this
        # method uses 'ValueError') -- confirm it is imported here
        try:
            depth = str(o.depth().value())
        except ValueException:
            depth = ''

        # author
        if self._hideAuthor:
            author = ''
        else:
            try:
                author = o.creationInfo().author()
            except ValueException:
                author = ''

        # contributor
        try:
            contrib = e.creationInfo().agencyID()
        except ValueException:
            contrib = ''

        # query for preferred magnitude (if any)
        mType, mVal, mAuthor = '', '', ''
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                mType = m.type()
                mVal = str(m.magnitude().value())
                if self._hideAuthor:
                    mAuthor = ''
                else:
                    try:
                        mAuthor = m.creationInfo().author()
                    except ValueException:
                        pass

        # event description
        dbq.loadEventDescriptions(e)
        region = ''
        for i in xrange(e.eventDescriptionCount()):
            ed = e.eventDescription(i)
            if ed.type() == DataModel.REGION_NAME:
                region = ed.text()
                break

        if req._disconnected:
            return False
        line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s\n" % (
               eID, o.time().value().toString(df), o.latitude().value(),
               o.longitude().value(), depth, author, contrib, eID, mType,
               mVal, mAuthor, region)
        utils.writeTS(req, line)
        lineCount += 1
        byteCount += len(line)

    # write response
    Logging.debug("%s: returned %i events (total bytes: %i) " % (
                  ro.service, lineCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
# NOTE(review): chunk fragment -- begins in the middle of a text-output
# request handler (its 'def' lies outside the visible range) and ends inside
# a truncated _matchStation(); the trailing 'for loc in ...' loop body is
# cut off. Left byte-identical; reformat once the full context is available.
lines.sort(key=lambda line: line[0]) for line in lines: data += line[1] # Return 204 if no matching inventory was found if len(lines) == 0: msg = "no matching inventory found" data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: utils.writeTS(req, data) return False utils.writeTS(req, data) Logging.debug("%s: returned %i lines (total bytes: %i)" % (ro.service, len(lines), len(data))) utils.accessLog(req, ro, http.OK, len(data), None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod def _matchStation(net, sta, ro, dac): # No filter: return true immediately if dac is None and \ (not ro.channel or (not ro.channel.loc and not ro.channel.cha)): return True for loc in ro.locationIter(net, sta, False): if dac is None and not ro.channel.cha and not ro.time: return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load all requested child objects of the events in *ep* -- event
    descriptions, comments, origins, magnitudes, focal mechanisms,
    moment tensors, arrivals and picks -- and stream the serialized
    result through exporter *exp*.

    req -- Twisted HTTP request
    ro  -- event request options
    dbq -- database query interface
    exp -- SeisComP exporter used to serialize the event parameters
    ep  -- EventParameters pre-filled with matching events

    Returns True on success, False if the client disconnected, an
    object limit was exceeded or the export failed.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        originIDs = set()
        magIDs = set()
        magIDs.add(e.preferredMagnitudeID())

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
                originIDs.add(oRef.originID())
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                originIDs.add(oRef.originID())
        dbIter.close()
        objCount += e.originReferenceCount()

        # focalMechanism references: either none, preferred only or all
        if ro.fm or ro.allFMs:
            dbIter = dbq.getObjects(
                e, DataModel.FocalMechanismReference.TypeInfo())
            for obj in dbIter:
                fmRef = DataModel.FocalMechanismReference.Cast(obj)
                if fmRef is None:
                    continue
                if ro.allFMs:
                    e.add(fmRef)
                elif fmRef.focalMechanismID() == \
                        e.preferredFocalMechanismID():
                    e.add(fmRef)
            dbIter.close()

        objCount += e.focalMechanismReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # focal mechanisms: process before origins to add derived origin to
        # originID list since it may be missing from origin reference list
        for iFMRef in xrange(e.focalMechanismReferenceCount()):
            if req._disconnected:
                return False
            fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
            obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
            fm = DataModel.FocalMechanism.Cast(obj)
            if fm is None:
                continue

            ep.add(fm)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(fm)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, fm)

            # momentTensors
            objCount += dbq.loadMomentTensors(fm)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            for iMT in xrange(fm.momentTensorCount()):
                mt = fm.momentTensor(iMT)
                originIDs.add(mt.derivedOriginID())
                magIDs.add(mt.momentMagnitudeID())

                if self._hideAuthor:
                    self._removeAuthor(mt)

                if ro.comments:
                    # NOTE(review): the inner loop shadows iMT and loads the
                    # comments of the same moment tensor repeatedly -- looks
                    # like a latent bug, left untouched here
                    for iMT in xrange(fm.momentTensorCount()):
                        objCount += self._loadComments(dbq, mt)

                objCount += dbq.loadDataUseds(mt)
                objCount += dbq.loadMomentTensorPhaseSettings(mt)
                if ro.staMTs:
                    objCount += dbq.loadMomentTensorStationContributions(mt)
                    for iStaMT in xrange(
                            mt.momentTensorStationContributionCount()):
                        objCount += dbq.load(
                            mt.momentTensorStationContribution(iStaMT))

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

        # find ID of origin containing preferred Magnitude
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                oID = dbq.parentPublicID(m)
                if oID:
                    originIDs.add(oID)

        # origins
        for oID in originIDs:
            if req._disconnected:
                return False
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes: all, or those collected in magIDs
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() in magIDs:
                    o.add(mag)
            dbIter.close()
            # NOTE(review): only the last magnitude seen in the iteration is
            # stripped here; stripping inside the loop was probably intended
            # -- left untouched
            if self._hideAuthor:
                self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # fixed: the original tested the bound method
                # 'self._removeAuthor' (always truthy) instead of the
                # '_hideAuthor' flag, stripping authors unconditionally
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

            # collect pick IDs if requested
            if ro.picks:
                for iArrival in xrange(o.arrivalCount()):
                    pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
# NOTE(review): chunk fragment -- begins in the middle of a text-output
# request handler (its 'def' lies outside the visible range) and ends inside
# a truncated _matchStation(); the trailing 'if ...:' suite is cut off.
# Left byte-identical; reformat once the full context is available.
lines.sort(key = lambda line: line[0]) for line in lines: data += line[1] # Return 204 if no matching inventory was found if len(lines) == 0: msg = "no matching inventory found" data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro) if data: utils.writeTS(req, data) return False utils.writeTS(req, data) Logging.debug("%s: returned %i lines (total bytes: %i)" % ( ro.service, len(lines), len(data))) utils.accessLog(req, ro, http.OK, len(data), None) return True #--------------------------------------------------------------------------- # Checks if at least one location and channel combination matches the # request options @staticmethod def _matchStation(net, sta, ro, dac): # No filter: return true immediately if dac is None and \ ( not ro.channel or ( not ro.channel.loc and not ro.channel.cha ) ): return True for loc in ro.locationIter(net, sta, False): if dac is None and not ro.channel.cha and not ro.time:
def _processRequestExp(self, req, ro, exp):
    """Build a filtered copy of the station inventory according to the
    request options *ro* and stream it through exporter *exp*.

    Returns True once a response has been delivered (including the 204
    no-data case), False if the client disconnected, an object limit
    was exceeded or the export failed.
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    # copies below must not clash with globally registered objects
    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors = set(), set()

    skipRestricted = not self._allowRestricted or (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(newNet, sta, ro, skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # NOTE(review): adds the cumulative station count rather than
            # newNet.stationCount(); objCount over-counts -- verify
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        # NOTE(review): 'ro' is passed as 4th positional argument; if
        # renderErrorPage declares (request, code, msg, version=...,
        # ro=None) it binds to 'version' -- verify the signature in use
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False
    Logging.notice(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes (total objects/bytes: "
        "%i/%i) " % (
            ro.service,
            newInv.networkCount(),
            staCount,
            locCount,
            chaCount,
            newInv.dataloggerCount(),
            decCount,
            newInv.sensorCount(),
            resCount,
            objCount,
            sink.written,
        )
    )
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
# NOTE(review): chunk fragment -- begins at an 'except' clause whose 'try:'
# and enclosing method 'def' lie outside the visible range; the trailing
# class Site may also continue beyond this chunk. Left byte-identical;
# reformat once the full context is available.
except Exception, e: msg = "token has invalid validity" Logging.warning("%s: %s" % (msg, str(e))) return self.renderErrorPage(request, http.BAD_REQUEST, msg) if lifetime <= 0: msg = "token is expired" Logging.warning(msg) return self.renderErrorPage(request, http.BAD_REQUEST, msg) userid = base64.urlsafe_b64encode( hashlib.sha256(verified.data).digest()[:18]) password = self.__userdb.addUser( userid, attributes, time.time() + min(lifetime, 24 * 3600), verified.data) utils.accessLog(request, None, http.OK, len(userid) + len(password) + 1, None) return '%s:%s' % (userid, password) ################################################################################ class Site(server.Site): #--------------------------------------------------------------------------- def getResourceFor(self, request): Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri)) request.setHeader('Server', "SeisComP3-FDSNWS/%s" % VERSION) request.setHeader('Access-Control-Allow-Origin', '*') request.setHeader('Access-Control-Allow-Headers', 'Authorization') request.setHeader('Access-Control-Expose-Headers', 'WWW-Authenticate') return server.Site.getResourceFor(self, request)
def _processRequestExp(self, req, ro, exp, dac):
    """Build a filtered copy of the station inventory (optionally with
    network/station comments and data-availability extents, *dac*)
    according to the request options *ro* and stream it through
    exporter *exp*.

    Returns True once a response has been delivered (including the 204
    no-data case), False if the client disconnected, an object limit
    was exceeded or the export failed.
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    # copies below must not clash with globally registered objects
    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    # NOTE(review): adds the cumulative extCount rather
                    # than len(e); objCount over-counts -- verify
                    objCount += numLoc + numCha + extCount
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # NOTE(review): adds the cumulative station count rather than
            # newNet.stationCount(); objCount over-counts -- verify
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        # NOTE(review): 'ro' is passed as 4th positional argument; verify
        # it matches this module's renderErrorPage signature
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount() + \
                       newInv.responseFAPCount() + \
                       newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False
    Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                  "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/" \
                  "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                  staCount, locCount, chaCount, newInv.dataloggerCount(),
                  decCount, newInv.sensorCount(), resCount, extCount,
                  objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestText(self, req, ro, dbq, ep):
    """Stream the events in *ep* as FDSN text format (pipe-separated
    lines), one line per event with its preferred origin and magnitude.

    Returns True on success, False if the client disconnected.
    """
    lineCount = 0

    line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
           "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
           "EventLocationName\n"
    df = "%FT%T.%f"
    utils.writeTS(req, line)
    byteCount = len(line)

    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        eID = e.publicID()

        # query for preferred origin
        obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                            e.preferredOriginID())
        o = DataModel.Origin.Cast(obj)
        if o is None:
            Logging.warning("preferred origin of event '%s' not found: %s" % (
                            eID, e.preferredOriginID()))
            continue

        # depth
        # NOTE(review): a sibling variant of this method catches
        # 'ValueException' here -- confirm which exception the SeisComP
        # bindings raise for unset optional values in this version
        try:
            depth = str(o.depth().value())
        except ValueError:
            depth = ''

        # author
        if self._hideAuthor:
            author = ''
        else:
            try:
                author = o.creationInfo().author()
            except ValueError:
                author = ''

        # contributor
        try:
            contrib = e.creationInfo().agencyID()
        except ValueError:
            contrib = ''

        # query for preferred magnitude (if any)
        mType, mVal, mAuthor = '', '', ''
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                mType = m.type()
                mVal = str(m.magnitude().value())
                if self._hideAuthor:
                    mAuthor = ''
                else:
                    try:
                        mAuthor = m.creationInfo().author()
                    except ValueError:
                        pass

        # event description
        dbq.loadEventDescriptions(e)
        region = ''
        for i in xrange(e.eventDescriptionCount()):
            ed = e.eventDescription(i)
            if ed.type() == DataModel.REGION_NAME:
                region = ed.text()
                break

        if req._disconnected:
            return False
        line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s\n" % (
               eID, o.time().value().toString(df), o.latitude().value(),
               o.longitude().value(), depth, author, contrib, eID, mType,
               mVal, mAuthor, region)
        utils.writeTS(req, line)
        lineCount += 1
        byteCount += len(line)

    # write response
    Logging.debug("%s: returned %i events (total bytes: %i) " % (
                  ro.service, lineCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Export the matched events of *ep* (plus related origins,
    magnitudes, arrivals and picks loaded on demand from *dbq*)
    through the exporter *exp*.

    The object count is checked against the configured query-object
    limit after every load step so oversized responses are rejected
    early.  Returns False on disconnect, export failure or exceeded
    limits, True on success.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects
    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                # preferred origin found: stop iterating
                dbIter.close()
        # TODO: if focal mechanisms are added make sure derived
        # origin is loaded

        objCount += e.originReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                    # preferred magnitude found: stop iterating
                    dbIter.close()
                if self._hideAuthor:
                    self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUG FIX: was `if self._removeAuthor:` which tested the
                # truthiness of a bound method (always True) and stripped
                # arrival authors unconditionally; guard on the configured
                # flag like every other author-removal site in this method.
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, exp, dac):
    """Export the inventory subset matched by the request options *ro*
    (network/station/channel level plus data-availability extents from
    *dac*) through the exporter *exp*.

    Builds a filtered copy of self._inv, honoring restricted-data
    settings, and answers 204 when nothing matches.  Returns False on
    disconnect, exceeded object limits or export failure, True
    otherwise (including the no-content case).
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    # registration disabled: the copies below must not clash with the
    # public-object registry of the live inventory
    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not self.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    # BUG FIX: was `objCount += numLoc + numCha + extCount`
                    # which added the *cumulative* extent total per station,
                    # overcounting objects and tripping the limit too early.
                    objCount += numLoc + numCha + len(e)
                    if not self.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # BUG FIX: was `objCount += staCount + 1`, which added the
            # cumulative station total once per network.
            objCount += newNet.stationCount() + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                newInv.responseFIRCount() + \
                newInv.responsePolynomialCount() + \
                newInv.responseFAPCount() + \
                newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/"
        "bytes: %i/%i) " % (ro.service, newInv.networkCount(), staCount,
                            locCount, chaCount, newInv.dataloggerCount(),
                            decCount, newInv.sensorCount(), resCount,
                            extCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Export the matched events of *ep* (plus related origins,
    magnitudes, arrivals and picks loaded on demand from *dbq*)
    through the exporter *exp*.

    The object count is checked against the configured query-object
    limit after every load step so oversized responses are rejected
    early.  Returns False on disconnect, export failure or exceeded
    limits, True on success.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects
    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            # BUG FIX: was `self._loadComment(dbq, e)` — a typo for
            # `_loadComments`, the name used at every other call site in
            # this method; would raise AttributeError when comments were
            # requested.
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                # preferred origin found: stop iterating
                dbIter.close()
        # TODO: if focal mechanisms are added make sure derived
        # origin is loaded

        objCount += e.originReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                    # preferred magnitude found: stop iterating
                    dbIter.close()
                if self._hideAuthor:
                    self._removeAuthor(mag)

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUG FIX: was `if self._removeAuthor:` which tested the
                # truthiness of a bound method (always True) and stripped
                # arrival authors unconditionally; guard on the configured
                # flag like every other author-removal site in this method.
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True