def onCancel(failure, req):
    """Errback invoked when a request is cancelled or fails.

    Logs the failure (or a plain cancellation notice) and cancels the
    associated deferred *req*.
    """
    if not failure:
        Logging.error("request canceled")
    else:
        Logging.error("%s %s" % (failure.getErrorMessage(),
                                 traceback.format_tb(failure.getTracebackObject())))
    req.cancel()
def _finish(self):
    """Finalize the response: emit a NO_CONTENT error page when nothing was
    written, log the outcome, notify all request trackers and close the
    request."""
    if self.stopped:
        # producer was aborted via stopProducing(); that path already
        # cleaned up the request
        return
    if self.written == 0:
        # no record was ever delivered -> report "no data" instead of 200
        msg = "no waveform data found"
        errorpage = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, VERSION, self.ro)
        if errorpage:
            self.req.write(errorpage)
        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "NODATA", 0, "")
            tracker.request_status("END", "")
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        for tracker in self.trackerList:
            tracker.volume_status("fdsnws", "OK", self.written, "")
            tracker.request_status("END", "")
    self.req.unregisterProducer()
    self.req.finish()
def run(self):
    """Start the service: blacklist configured users, build the site,
    bind the TCP port, install the SIGHUP handler and reload task, then
    run the Twisted reactor (blocks until shutdown)."""
    retn = False
    try:
        for user in self._authBlacklist:
            self._userdb.blacklistUser(user)
        site = self._site()
        if not site:
            return False
        # start listen for incoming request
        self.__tcpPort = reactor.listenTCP(self._port, site, self._connections, self._listenAddress)
        # setup signal handler
        self.__sighup = False
        signal.signal(signal.SIGHUP, self._sighupHandler)
        # poll for a pending SIGHUP-triggered reload once a minute
        task.LoopingCall(self._reloadTask).start(60, False)
        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)
        reactor.run()
        # reached only after reactor shutdown
        retn = True
    except Exception, e:
        Logging.error(str(e))
    # NOTE(review): no 'return retn' is visible in this excerpt --
    # presumably the function continues (or the return was lost); verify
    # against the full file.
def __init__(self, app, da, validUntil):
    """Build the stream-extent cache from the data availability object.

    Collects one (extent, restricted) entry per NET.STA.LOC.CHA stream,
    skipping restricted streams unless the application allows them.  When
    availability is served, also loads attribute extents and prepares
    sorted/OID-indexed lookups.
    """
    self._da = da
    self._validUntil = validUntil
    self._extents = {}
    self._extentsSorted = []
    self._extentsOID = {}
    for i in xrange(self._da.dataExtentCount()):
        ext = self._da.dataExtent(i)
        wid = ext.waveformID()
        sid = "%s.%s.%s.%s" % (wid.networkCode(), wid.stationCode(),
                               wid.locationCode(), wid.channelCode())
        # a stream is restricted unless it appears in the open-streams set
        restricted = app._openStreams is None or sid not in app._openStreams
        if restricted and not app._allowRestricted:
            continue
        self._extents[sid] = (ext, restricted)
        # Logging.debug("%s: %s ~ %s" % (sid, ext.start().iso(),
        #                                ext.end().iso()))
    if app._serveAvailability:
        # load data attribute extents if availability is served
        for i in xrange(da.dataExtentCount()):
            extent = da.dataExtent(i)
            app.query().loadDataAttributeExtents(extent)
        # create a list of (extent, oid, restricted) tuples sorted by stream
        self._extentsSorted = [(e, app.query().getCachedId(e), res)
                               for wid, (e, res) in sorted(self._extents.iteritems(),
                                                           key=lambda t: t[0])]
        # create a dictionary of object ID to extents
        self._extentsOID = dict((oid, (e, res))
                                for (e, oid, res) in self._extentsSorted)
    Logging.info("loaded %i extents" % len(self._extents))
def resumeProducing(self):
    """Pull the next record from the record-stream input, logging (and
    otherwise ignoring) any read error.

    NOTE(review): this excerpt ends after the read; presumably the
    record is processed further below -- verify against the full file."""
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        Logging.warning("%s" % str(e))
def charstar(string):
    """Coerce *string* to the type the seiscomp SWIG bindings expect for
    C++ string/char* arguments.

    Depending on the seiscomp/python combination (and whether the SWIG
    bindings were built with SWIG_PYTHON_2_UNICODE), char* arguments want
    either bytes or unicode.  There is no simple way to introspect the
    right type, so on first use we probe it: a logging call is attempted
    with a bytes argument and the TypeError (or lack of one) tells us
    which representation the API accepts.  The result is cached in the
    module-level _charstar_is_bytes flag.
    """
    global _charstar_is_bytes
    if _charstar_is_bytes is None:
        # one-time probe: feed bytes to the API and see if it objects
        try:
            Logging.debug(b"Detected SWIG char* type as bytes")
            _charstar_is_bytes = True
        except TypeError:
            Logging.debug(u"Detecting SWIG char* type as unicode")
            _charstar_is_bytes = False
    already_bytes = isinstance(string, bytes)
    if _charstar_is_bytes:
        return string if already_bytes else string.encode('utf-8')
    return string.decode('utf-8') if already_bytes else string
def resumeProducing(self):
    """Stream one miniSEED record per pull.

    On the first call (written == 0) either emits a NO_CONTENT error page
    when no data exists at all, or sets the miniSEED response headers.
    When the input is exhausted, logs the byte count and finishes the
    request; otherwise writes the raw record and advances the counter."""
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        Logging.warning("%s" % str(e))
    if self.written == 0:
        # read first record to test if any data exists at all
        if not rec:
            msg = "no waveform data found"
            data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro)
            if data:
                self.req.write(data)
            self.req.unregisterProducer()
            self.req.finish()
            return
        # headers may only be set before the first body byte is written
        self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
        self.req.setHeader('Content-Disposition', "attachment; " \
                           "filename=%s" % self.fileName)
    if not rec:
        # input exhausted after at least one record: normal completion
        self.req.unregisterProducer()
        Logging.debug("%s: returned %i bytes of mseed data" % (
                      self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        self.req.finish()
        return
    data = rec.raw().str()
    self.req.write(data)
    self.written += len(data)
def createCommandLineDescription(self):
    """Register the "Parameters" command-line option group (--coord and
    --time) with the application's command-line parser."""
    try:
        self.commandline().addGroup("Parameters")
        self.commandline().addStringOption("Parameters", "coord",
                                           "lat,lon,dep of origin")
        self.commandline().addStringOption("Parameters", "time", "time of origin")
    # fixed: was a bare 'except:', which would also swallow SystemExit
    # and KeyboardInterrupt; narrowed to Exception, behavior otherwise
    # unchanged (log and continue)
    except Exception:
        Logging.warning("caught unexpected error %s" % sys.exc_info())
class AuthResource(resource.Resource):
    """Twisted resource accepting a PGP-signed token via POST and
    validating its signature and validity period."""

    isLeaf = True

    def __init__(self, gnupghome, userdb):
        resource.Resource.__init__(self)
        # GPG keyring used to verify/decrypt incoming tokens
        self.__gpg = gnupg.GPG(gnupghome=gnupghome)
        self.__userdb = userdb

    #---------------------------------------------------------------------------
    def render_POST(self, request):
        """Decrypt and validate the posted token.

        Rejects tokens that cannot be decrypted, are not fully trusted, or
        carry an unparsable/expired 'valid_until' attribute.

        NOTE(review): this excerpt ends after computing 'lifetime';
        presumably the user registration follows -- verify against the
        full file."""
        request.setHeader('Content-Type', 'text/plain')
        try:
            verified = self.__gpg.decrypt(request.content.getvalue())
        except Exception, e:
            msg = "invalid token"
            Logging.warning("%s: %s" % (msg, str(e)))
            return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None)
        # only fully trusted signatures are accepted
        if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY:
            msg = "token has invalid signature"
            Logging.warning(msg)
            return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None)
        try:
            attributes = json.loads(verified.data)
            # remaining lifetime of the token in seconds
            td = dateutil.parser.parse(attributes['valid_until']) - \
                datetime.datetime.now(dateutil.tz.tzutc())
            lifetime = td.seconds + td.days * 24 * 3600
        except Exception, e:
            msg = "token has invalid validity"
            Logging.warning("%s: %s" % (msg, str(e)))
            return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None)
def _processRequest(self, req, ro, dbq, exp):
    """Collect matching events (by ID list or search) and dispatch to the
    exporter- or text-based response writer.  Returns False on error or
    disconnect."""
    if req._disconnected:
        return False
    # avoid global registration of objects loaded from the database
    DataModel.PublicObject.SetRegistrationEnabled(False)
    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            event = dbq.getEventByPublicID(eID)
            event = DataModel.Event.Cast(event)
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)
    if ep.eventCount() == 0:
        msg = "no matching events found"
        utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False
    Logging.debug("events found: %i" % ep.eventCount())
    # csv and plain-text output share the text/plain content type
    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')
    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def renderErrorPage(request, code, msg, ro=None):
    """Render the FDSNWS plain-text error document.

    Sets the response code (optionally remapped by the request's
    'nodata' option), writes an access-log entry and returns the body.
    """
    template = """\
Error %i: %s

%s

Usage details are available from %s

Request:
%s

Request Submitted:
%s

Service Version:
%s
"""
    # a NO_CONTENT result may be remapped to the client-requested nodata code
    if ro is not None and code == http.NO_CONTENT:
        code = ro.noData
    request.setHeader('Content-Type', 'text/plain')
    request.setResponseCode(code)
    usage = "%s/" % (request.path.rpartition('/')[0])
    reason = http.RESPONSES[code]
    Logging.warning("responding with error: %i (%s)" % (code, reason))
    stamp = Core.Time.GMT().toString("%FT%T.%f")
    body = template % (code, reason, msg, usage, request.uri, stamp, VERSION)
    utils.accessLog(request, ro, code, len(body), msg)
    return body
def _processRequest(self, req, ro, dbq, exp):
    """Collect matching events, applying the configured event-type
    white/blacklist and evaluation-mode filter to explicitly requested
    event IDs, then dispatch to the exporter- or text-based writer."""
    if req._disconnected:
        return False
    # avoid global registration of objects loaded from the database
    DataModel.PublicObject.SetRegistrationEnabled(False)
    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue
            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                # -1 marks "type not set" (type() raises when unset)
                eType = -1
                try:
                    eType = e.type()
                except ValueError:
                    pass
                if self._eventTypeWhitelist and \
                        not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                        eType in self._eventTypeBlacklist:
                    continue
            if self._evaluationMode is not None:
                # filter on the evaluation mode of the preferred origin
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                            o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueError:
                    continue
            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)
    if ep.eventCount() == 0:
        msg = "no matching events found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return True
    Logging.debug("events found: %i" % ep.eventCount())
    # csv and plain-text output share the text/plain content type
    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')
    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def onRequestServed(success, req):
    """Callback fired when a request has been handled.

    Skips already-disconnected requests; otherwise logs the outcome and
    finishes the request from the reactor thread.
    """
    if req._disconnected:
        Logging.debug("Request aborted")
        return
    outcome = "successfully served" if success else "failed"
    Logging.debug("Request %s" % outcome)
    reactor.callFromThread(req.finish)
def getResourceFor(self, request):
    """Log the incoming request and attach the standard service and CORS
    headers before delegating resource lookup to the base Site."""
    Logging.debug("request (%s): %s" % (request.getClientIP(), request.uri))
    for name, value in (
            ('Server', "SeisComP3-FDSNWS/%s" % VERSION),
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', 'Authorization'),
            ('Access-Control-Expose-Headers', 'WWW-Authenticate')):
        request.setHeader(name, value)
    return server.Site.getResourceFor(self, request)
def render_GET(self, req):
    """Parse event-service GET parameters, answering BAD_REQUEST on any
    validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate GET parameters
    ro = _EventRequestOptions(req.args)
    try:
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def __init__(self, argc, argv):
    """Initialize the application: set up S3 log capture, default
    configuration values and messaging parameters."""
    # Log all messages to a file for S3
    self._logfile_for_s3 = NamedTemporaryFile()
    self._logger_for_s3 = Logging.FileOutput(self._logfile_for_s3.name)
    for level in ('notice', 'error', 'warning', 'info', 'debug'):
        self._logger_for_s3.subscribe(Logging.getGlobalChannel(level))
    Application.__init__(self, argc, argv)
    # default location to write outputs to
    self.output = settings.OUTPUT_DIR
    self.filename = None
    # event/magnitude selection defaults
    self.mag_type = None
    self.mag_value = None
    self.server = 'IRIS'
    self.fdsn_client = None
    self.networks = 'ALL'
    self.region = 'not specified'
    self.evid = None
    self.resultid = None
    # e-mail notification settings (SES by default, SMTP optional)
    self.notificationemail = None
    self.fromemail = None
    self.email_aws_region = None
    self.email_method = 'ses'
    self.email_subject_postfix = ''
    self.email_subject_prefix = ''
    self.smtp_server = None
    self.smtp_port = 25
    self.smtp_ssl = False
    self.smtp_tls = False
    self.smtp_user = None
    self.smtp_password = None
    # S3 upload settings
    self.write_s3 = False
    self.bucket_name = None
    self.agency = 'GA'
    self.make_maps = True
    self.overwrite = False
    # optional waveform/inventory persistence
    self.save_waveforms = None
    self.save_inventory = None
    self.waveforms = None
    self.inventory = None
    self.eqinfo: Optional[model.Event] = None
    # enable messaging support
    self.setMessagingEnabled(True)
    # disable database access
    self.setDatabaseEnabled(False, False)
    # default spread username
    self.setMessagingUsername("gawphase")
    # send all objects to the focal mechanism group
    self.setPrimaryMessagingGroup("FOCMECH")
def _processRequest(self, req, ro, dbq, exp):
    """Collect matching events, applying the event-type white/blacklist
    (matched by type *name* here) and evaluation-mode filter, then
    dispatch to the exporter- or text-based writer.

    NOTE(review): the except clauses reference 'ValueException' --
    presumably seiscomp3.Core.ValueException brought in by a star
    import; a sibling variant of this method catches ValueError instead.
    If ValueException is not in scope this raises NameError at runtime;
    verify the module's imports."""
    if req._disconnected:
        return False
    # avoid global registration of objects loaded from the database
    DataModel.PublicObject.SetRegistrationEnabled(False)
    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue
            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                # None marks "type not set" (type() raises when unset)
                eType = None
                try:
                    eType = DataModel.EEventTypeNames_name(e.type())
                except ValueException:
                    pass
                if self._eventTypeWhitelist and \
                        not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                        eType in self._eventTypeBlacklist:
                    continue
            if self._evaluationMode is not None:
                # filter on the evaluation mode of the preferred origin
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                            o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueException:
                    continue
            ep.add(e)
    else:
        self._findEvents(ep, ro, dbq)
    if ep.eventCount() == 0:
        msg = "no matching events found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True
    Logging.debug("events found: %i" % ep.eventCount())
    # csv and plain-text output share the text/plain content type
    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')
    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)
    return self._processRequestText(req, ro, dbq, ep)
def render_POST(self, req):
    """Parse station-service POST parameters, answering BAD_REQUEST on
    any validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate POST parameters
    ro = _StationRequestOptions()
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_POST(self, req):
    """Parse station-service POST parameters, answering BAD_REQUEST on
    any validation error (HTTP helper variant).

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate POST parameters
    ro = _StationRequestOptions()
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_GET(self, req):
    """Parse station-service GET parameters, answering BAD_REQUEST on
    any validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate GET parameters
    ro = _StationRequestOptions(req.args)
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_POST(self, req):
    """Parse dataselect POST parameters, attaching the authenticated
    user's mail address (if any) and answering BAD_REQUEST on any
    validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate POST parameters
    ro = _DataSelectRequestOptions()
    # userName is only set when an authenticated user is present
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parsePOST(req.content)
        ro.parse()
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_GET(self, req):
    """Parse station-service GET parameters (HTTP helper variant),
    answering BAD_REQUEST on any validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate GET parameters
    ro = _StationRequestOptions(req.args)
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def render_POST(self, request):
    """Decrypt the posted PGP token, answering BAD_REQUEST when
    decryption fails.

    NOTE(review): this excerpt ends after decryption; presumably token
    validation follows -- verify against the full file."""
    request.setHeader('Content-Type', 'text/plain')
    try:
        verified = self.__gpg.decrypt(request.content.getvalue())
    except Exception, e:
        msg = "invalid token"
        Logging.warning("%s: %s" % (msg, str(e)))
        return HTTP.renderErrorPage(request, http.BAD_REQUEST, msg, None)
def render_POST(self, request):
    """Decrypt the posted PGP token, answering INTERNAL_SERVER_ERROR
    when the gpg binary itself fails (OSError).

    NOTE(review): this excerpt ends after decryption; presumably token
    validation follows -- verify against the full file."""
    request.setHeader('Content-Type', 'text/plain')
    try:
        verified = self.__gpg.decrypt(request.content.getvalue())
    except OSError, e:
        # gpg executable failure is a server-side problem, not a bad token
        msg = "gpg decrypt error"
        Logging.warning("%s: %s" % (msg, str(e)))
        return HTTP.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg, None)
def render_GET(self, req):
    """Handle an event-service GET request.

    Validates parameters, rejects unsupported filters (catalog, comments
    when hidden, updatedafter) and unavailable formats, creates the
    exporter and database query, and defers the actual processing to a
    worker thread.  Returns NOT_DONE_YET; the deferred finishes the
    request."""
    # Parse and validate GET parameters
    ro = _EventRequestOptions()
    try:
        ro.parseGET(req.args)
        ro.parse()
    except ValueError as e:
        Logging.warning(str(e))
        return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
    # Catalog filter is not supported
    if ro.catalogs:
        msg = "catalog filter not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    if ro.comments and self._hideComments:
        msg = "including of comments not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    # updateafter not implemented
    if ro.updatedAfter:
        msg = "filtering based on update time not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    if self._formatList is not None and ro.format not in self._formatList:
        msg = "output format '%s' not available" % ro.format
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    # Exporter, 'None' is used for text output
    if ro.format in ro.VText:
        exp = None
    else:
        exp = Exporter.Create(ro.Exporters[ro.format])
        if exp:
            exp.setFormattedOutput(bool(ro.formatted))
        else:
            msg = "output format '%s' not available, export module '%s' could " \
                  "not be loaded." % (ro.format, ro.Exporters[ro.format])
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
    # Create database query
    db = DatabaseInterface.Open(Application.Instance().databaseURI())
    if db is None:
        msg = "could not connect to database"
        return self.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
    dbq = DataModel.DatabaseQuery(db)
    # Process request in separate thread
    d = deferToThread(self._processRequest, req, ro, dbq, exp)
    req.notifyFinish().addErrback(utils.onCancel, d)
    d.addBoth(utils.onFinish, req)
    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def run(self):
    """Load the stream configuration and open the input file ('-' means
    stdin), returning False when the file cannot be opened.

    NOTE(review): this excerpt ends after the open; presumably the input
    is processed further below -- verify against the full file."""
    self.loadStreams()
    try:
        if self.inputFile == '-':
            f = sys.stdin
        else:
            f = open(self.inputFile)
    except IOError, e:
        Logging.error(str(e))
        return False
def render_GET(self, req):
    """Parse dataselect GET parameters, attaching the authenticated
    user's mail address (if any) and answering BAD_REQUEST on any
    validation error.

    NOTE(review): this excerpt ends after parameter validation;
    presumably request processing follows -- verify against the full
    file."""
    # Parse and validate POST parameters
    ro = _DataSelectRequestOptions(req.args)
    # userName is only set when an authenticated user is present
    ro.userName = self.__user and self.__user.get('mail')
    try:
        ro.parse()
        # the GET operation supports exactly one stream filter
        ro.streams.append(ro)
    except ValueError, e:
        Logging.warning(str(e))
        return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
def resumeProducing(self):
    """Accumulate raw miniSEED records into a buffer of at least
    self.bufSize bytes, stopping early on end of input or read error.

    NOTE(review): this excerpt ends after the read loop; presumably the
    buffer is written to the request further below -- verify against the
    full file."""
    rec = None
    data = ""
    while len(data) < self.bufSize:
        try:
            rec = self.rsInput.next()
            if rec:
                data += rec.raw().str()
            else:
                # input exhausted
                break
        except Exception, e:
            Logging.warning("%s" % str(e))
            break
def render_POST(self, request):
    """Decrypt the posted PGP token, answering INTERNAL_SERVER_ERROR
    when the gpg binary itself fails (OSError).

    NOTE(review): this excerpt ends after decryption; presumably token
    validation follows -- verify against the full file."""
    request.setHeader('Content-Type', 'text/plain')
    try:
        verified = self.__gpg.decrypt(request.content.getvalue())
    except OSError, e:
        # gpg executable failure is a server-side problem, not a bad token
        msg = "gpg decrypt error"
        Logging.warning("%s: %s" % (msg, str(e)))
        return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg)
def addUser(self, name, attributes, expires, data):
    """Register *name* (or refresh an existing registration) and return
    the user's password.

    A password is generated only for previously unknown users; known
    users keep their existing one.  The stored entry is always refreshed
    with the given attributes and expiry, and the attributes are tagged
    with the user's blacklist status.
    """
    blacklisted = name in self.__blacklist
    entry = self.__users.get(name)
    if entry is not None:
        password = entry[0]
    else:
        # first sighting of this user: log it and mint a random password
        bl = " (blacklisted)" if blacklisted else ""
        Logging.notice("registering %s%s %s" % (name, bl, data))
        password = base64.urlsafe_b64encode(os.urandom(12))
    attributes['blacklisted'] = blacklisted
    self.__users[name] = (password, attributes, expires)
    return password
def logSC3(entry):
    """Twisted log observer forwarding reactor messages to SeisComP
    logging: error events go to Logging.error, everything else to
    Logging.info.  A log observer must never raise, so failures are
    silently ignored."""
    try:
        isError = entry['isError']
        msg = entry['message']
        if isError:
            for l in msg:
                Logging.error("[reactor] %s" % l)
        else:
            for l in msg:
                Logging.info("[reactor] %s" % l)
    # fixed: was a bare 'except:', which would also swallow SystemExit
    # and KeyboardInterrupt; narrowed to Exception while keeping the
    # deliberate best-effort behavior
    except Exception:
        pass
def render_GET(self, req): # No archive no service if not os.path.isdir(self._sdsPath): msg = "SDS archive not found: %s" % self._sdsPath return HTTP.renderErrorPage(request, http.SERVICE_UNAVAILABLE, msg) # Parse and validate GET parameters try: ro = _DataSelectRequestOptions(req.args) ro.parse() except ValueError, e: Logging.warning(str(e)) return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e))
def run(self):
    """Apply the attribute rules to the loaded inventory.

    Walks networks -> stations -> sensor locations -> channels, resolving
    attributes from the rule set for each level, then applies name-based
    attributes to sensors and dataloggers.  When no output file is set,
    changes are propagated as notifiers."""
    rules = self.rules
    iv = Client.Inventory.Instance().inventory()
    if not rules:
        return False
    if not iv:
        return False
    Logging.debug("Loaded %d networks" % iv.networkCount())
    if self.outputFile is None:
        # no output file -> emit notifiers to the messaging system
        DataModel.Notifier.Enable()
        self.setInterpretNotifierEnabled(True)
    for net in self._loop(iv.network, iv.networkCount()):
        (ncode, nstart, nend) = self._collect(net)
        key = rules.findKey(ncode, nstart, nend)
        if not key:
            # no rule matches this network epoch
            continue
        att = rules.getNetworkAttributes(key)
        self._modifyInventory("network", net, att)
        Logging.info("%s %s" % (ncode, att))
        for sta in self._loop(net.station, net.stationCount()):
            (scode, sstart, send) = self._collect(sta)
            att = rules.getStationAttributes(key, ncode, scode, None,
                                             None, sstart, send)
            self._modifyInventory("station", sta, att)
            if att:
                Logging.info(" %s %s" % (scode, att))
            for loc in self._loop(sta.sensorLocation, sta.sensorLocationCount()):
                (lcode, lstart, lend) = self._collect(loc)
                att = rules.getStationAttributes(key, ncode, scode, lcode,
                                                 None, lstart, lend)
                self._modifyInventory("location", loc, att)
                if att:
                    Logging.info(" %s %s" % (lcode, att))
                for cha in self._loop(loc.stream, loc.streamCount()):
                    (ccode, cstart, cend) = self._collect(cha)
                    att = rules.getStationAttributes(
                        key, ncode, scode, lcode, ccode, cstart, cend)
                    self._modifyInventory("channel", cha, att)
                    if att:
                        Logging.info(" %s %s" % (ccode, att))
    # instrument attributes are matched by instrument name, not stream code
    for sensor in self._loop(iv.sensor, iv.sensorCount()):
        att = rules.getInstrumentsAttributes(sensor.name(), "Se")
        self._modifyInventory("sensor", sensor, att)
    for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
        att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
        self._modifyInventory("datalogger", datalogger, att)
    return True
def run(self):
    """Apply the attribute rules to the loaded inventory (duplicate
    variant of the sibling run()).

    Walks networks -> stations -> sensor locations -> channels, resolving
    attributes from the rule set for each level, then applies name-based
    attributes to sensors and dataloggers.  When no output file is set,
    changes are propagated as notifiers."""
    rules = self.rules
    iv = Client.Inventory.Instance().inventory()
    if not rules:
        return False
    if not iv:
        return False
    Logging.debug("Loaded %d networks" % iv.networkCount())
    if self.outputFile is None:
        # no output file -> emit notifiers to the messaging system
        DataModel.Notifier.Enable()
        self.setInterpretNotifierEnabled(True)
    for net in self._loop(iv.network, iv.networkCount()):
        (ncode, nstart, nend) = self._collect(net)
        key = rules.findKey(ncode, nstart, nend)
        if not key:
            # no rule matches this network epoch
            continue
        att = rules.getNetworkAttributes(key)
        self._modifyInventory("network", net, att)
        Logging.info("%s %s" % (ncode, att))
        for sta in self._loop(net.station, net.stationCount()):
            (scode, sstart, send) = self._collect(sta)
            att = rules.getStationAttributes(key, ncode, scode, None,
                                             None, sstart, send)
            self._modifyInventory("station", sta, att)
            if att:
                Logging.info(" %s %s" % (scode, att))
            for loc in self._loop(sta.sensorLocation, sta.sensorLocationCount()):
                (lcode, lstart, lend) = self._collect(loc)
                att = rules.getStationAttributes(key, ncode, scode, lcode,
                                                 None, lstart, lend)
                self._modifyInventory("location", loc, att)
                if att:
                    Logging.info(" %s %s" % (lcode, att))
                for cha in self._loop(loc.stream, loc.streamCount()):
                    (ccode, cstart, cend) = self._collect(cha)
                    att = rules.getStationAttributes(key, ncode, scode, lcode,
                                                     ccode, cstart, cend)
                    self._modifyInventory("channel", cha, att)
                    if att:
                        Logging.info(" %s %s" % (ccode, att))
    # instrument attributes are matched by instrument name, not stream code
    for sensor in self._loop(iv.sensor, iv.sensorCount()):
        att = rules.getInstrumentsAttributes(sensor.name(), "Se")
        self._modifyInventory("sensor", sensor, att)
    for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
        att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
        self._modifyInventory("datalogger", datalogger, att)
    return True
def stopProducing(self):
    """Abort the transfer: mark the producer stopped, log the partial
    byte count, report an ERROR volume status to all trackers and close
    the request."""
    self.stopped = True
    Logging.debug("%s: returned %i bytes of mseed data (not completed)"
                  % (self.ro.service, self.written))
    utils.accessLog(self.req, self.ro, http.OK, self.written, "not completed")
    for t in self.trackerList:
        t.volume_status("fdsnws", "ERROR", self.written, "")
        t.request_status("END", "")
    self.req.unregisterProducer()
    self.req.finish()
def __init__(self, config, name, group):
    """Connect to the mediator and the database as configured.

    Raises ConnectionError when the mediator connection cannot be
    established and DatabaseError when the database driver is missing or
    the database connection fails."""
    mediatorAddress = config.getString("connection.server")
    dbDriverName = config.getString("database.type")
    dbAddress = config.getString("database.parameters")
    connection = Communication.Connection.Create(mediatorAddress, name, group)
    if connection is None:
        Logging.error("Cannot connect to Mediator")
        raise ConnectionError, "connection could not be established"
    else:
        Logging.info("Connection has been established")
    dbDriver = IO.DatabaseInterface.Create(dbDriverName)
    if dbDriver is None:
        Logging.error("Cannot find database driver " + dbDriverName)
        raise DatabaseError, "driver not found"
    if not dbDriver.connect(dbAddress):
        Logging.error("Cannot connect to database at " + dbAddress)
        raise DatabaseError, "connection could not be established"
    self.__connection = connection
    # This reference to dbDriver is essential, since dbQuery becomes
    # invalid when dbDriver is deleted
    self.__dbDriver = dbDriver
    self.dbQuery = DatabaseQuery(dbDriver)
def get_closest_city(latitude, longitude):
    """Query the geolocation service for the nearest city and return a
    Spanish 'a <distance> de <city>, <province>' phrase, or '---' on any
    failure."""
    try:
        query = '%s/get_nearest_city?lat=%s&lon=%s&token=%s' % (
            cfg.geolocation_service_url, latitude, longitude,
            cfg.geolocation_service_token)
        result = requests.get(query)
        # response looks like "(distance, 'city', 'province')"
        # NOTE(review): .encode(...) then .split(',') only works on
        # Python 2 (str); on Python 3 this raises TypeError and falls
        # into the '---' branch -- verify the intended interpreter
        distance, city, province = result.text.strip('()').encode(
            'utf-8', errors='ignore').split(',')
        return 'a %s de %s, %s' % (distance, city.strip(" '"),
                                   province.strip(" '"))
    except Exception as e:
        msg_error = "##Error in get_closest_city:%s" % str(e)
        print(msg_error)
        logging.error(msg_error)
        return '---'
def __getDayRaw(self, day, startt, endt, net, sta, loc, cha, bufferSize):
    """Yield raw miniSEED buffers for one day and one stream.

    Iterates over the candidate SDS file names for the stream, skipping
    files that do not exist, and streams each existing file through the
    waveform extractor.  A corrupt file is logged and skipped.
    """
    # '--' is the conventional placeholder for an empty location code
    if loc == '--':
        loc = ''
    for path in self.__getMSName(day, net, sta, loc, cha):
        if not os.path.exists(path):
            continue
        try:
            with open(path, 'rb') as fh:
                for chunk in self.__getWaveform(startt, endt, fh, bufferSize):
                    yield chunk
        except mseedlite.MSeedError as e:
            # corrupt/unreadable file: log and continue with the next one
            Logging.error("%s: %s" % (path, str(e)))
class AuthResource(BaseResource):
    """Versioned Twisted resource accepting a PGP-signed token via POST."""

    isLeaf = True

    def __init__(self, version, gnupghome, userdb):
        BaseResource.__init__(self, version)
        # GPG keyring used to verify/decrypt incoming tokens
        self.__gpg = gnupg.GPG(gnupghome=gnupghome)
        self.__userdb = userdb

    #---------------------------------------------------------------------------
    def render_POST(self, request):
        """Decrypt the posted token, distinguishing gpg execution errors
        (server-side, 500) from invalid tokens (client-side, 400).

        NOTE(review): this excerpt ends after decryption; presumably
        token validation follows -- verify against the full file."""
        request.setHeader('Content-Type', 'text/plain')
        try:
            verified = self.__gpg.decrypt(request.content.getvalue())
        except OSError, e:
            # gpg executable failure is a server-side problem
            msg = "gpg decrypt error"
            Logging.warning("%s: %s" % (msg, str(e)))
            return self.renderErrorPage(request, http.INTERNAL_SERVER_ERROR, msg)
        except Exception, e:
            msg = "invalid token"
            Logging.warning("%s: %s" % (msg, str(e)))
            return self.renderErrorPage(request, http.BAD_REQUEST, msg)
def renderErrorPage(request, code, msg):
    """Render a minimal HTML error document for *request*, set the HTTP
    response code and return the page body."""
    page = """\
<html>
 <head><title>%i - %s</title></head>
 <body>
  <h1>%s</h1>
  <p>%s</p>
 </body>
</html>"""
    request.setHeader("Content-Type", "text/html")
    request.setResponseCode(code)
    reason = http.RESPONSES[code]
    Logging.warning("Responding with error: %i (%s)" % (code, reason))
    return page % (code, reason, reason, msg)
def onFinish(result, req):
    """Both-branch callback attached to a request-processing deferred.

    Logs the outcome; a cancelled request is left alone (it was already
    torn down), every other case finishes the request from the reactor
    thread.
    """
    Logging.debug("finish value = %s" % str(result))
    if not isinstance(result, Failure):
        # plain result: truthy means the handler reported success
        if result:
            Logging.debug("request successfully served")
        else:
            Logging.debug("request failed")
        reactor.callFromThread(req.finish)
        return
    err = result.value
    if isinstance(err, defer.CancelledError):
        # cancellation already closed the request; do not finish it again
        Logging.error("request canceled")
        return
    Logging.error("%s %s" % (result.getErrorMessage(),
                             traceback.format_tb(result.getTracebackObject())))
    reactor.callFromThread(req.finish)
def resumeProducing(self):
    """Stream one miniSEED record per pull.

    On the first call either emits a NOT_FOUND error page when no data
    exists at all, or sets the miniSEED response headers.  When the
    input is exhausted the request is finished; otherwise the raw record
    is written."""
    rec = None
    try:
        rec = self.rsInput.next()
    except Exception, e:
        Logging.warning("%s" % str(e))  # e.g. ArchiveException
    if not self.initialized:
        self.initialized = True
        # read first record to test if any data exists at all
        if not rec:
            msg = "No waveform data found"
            self.req.write(HTTP.renderErrorPage(self.req, http.NOT_FOUND, msg))
            self.req.unregisterProducer()
            self.req.finish()
            return
        # headers may only be set before the first body byte is written
        self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
        self.req.setHeader("Content-Disposition", "attachment; filename=%s" % \
                           self.fileName)
    if not rec:
        # input exhausted: normal completion
        self.req.unregisterProducer()
        self.req.finish()
        return
    self.req.write(rec.raw().str())
def open_SC3Db(self, dbUrl):
    """Open a SeisComP3 database from a driver://user:pass@host/db URL
    and return a DatabaseQuery bound to it.

    Exits the program (SystemExit) when the URL cannot be parsed, the
    driver is unknown or the connection fails.  Also publishes the query
    object via sc3wrap.dbQuery."""
    m = re.match("(?P<dbDriverName>^.*):\/\/(?P<dbAddress>.+?:.+?@.+?\/.+$)", dbUrl)
    if not m:
        raise SystemExit, "error in parsing SC3 DB url"
    _dbUrl = m.groupdict()
    dbDriver = IO.DatabaseInterface.Create(_dbUrl["dbDriverName"])
    if dbDriver is None:
        Logging.error("Cannot find database driver " + _dbUrl["dbDriverName"])
        raise SystemExit, "driver not found"
    if not dbDriver.connect(_dbUrl["dbAddress"]):
        Logging.error("Cannot connect to database at " + _dbUrl["dbAddress"])
        raise SystemExit, "connection could not be established"
    print "opening destination Database: " + _dbUrl["dbAddress"]
    dbQuery = DataModel.DatabaseQuery(dbDriver)
    # make the query globally available to the sc3wrap helpers
    sc3wrap.dbQuery = dbQuery
    return dbQuery
def _addStream(self, ro, streams, toks, lastFileName):
    """Append one (net, sta, start, end) tuple to *streams* built from
    SDS file-name tokens, clipped to the requested time window; streams
    completely outside the window are skipped and unparsable names are
    logged."""
    start, end = Time(), Time()
    # toks[4]/toks[5] hold year and day-of-year of the first file;
    # lastFileName[-8:] holds year.doy of the last file of the stream
    if start.fromString("%s.%s" % (toks[4], toks[5]), "%Y.%j") and \
            end.fromString(lastFileName[-8:] + "23:59:59", "%Y.%j%T"):
        # match time
        if ro.time.start > end or \
                (ro.time.end and ro.time.end < start):
            return
        # limit time to requested time
        if ro.time.start > start:
            start = ro.time.start
        if ro.time.end and ro.time.end < end:
            end = ro.time.end
        streams.append((toks[1], toks[2], start, end))
    else:
        Logging.warning("invalid stream information: %s%s.%s" % (
            toks[0], toks[1], toks[2]))
def send_notifiers(self, group):
    """Send all pending notifiers to *group* in batches of at most 100
    per message, syncing after each batch; a final partial batch is
    flushed in the finally clause.

    NOTE(review): returns mcount, which at this point holds only the
    size of the last (unflushed) batch counter, not the total number of
    notifiers sent -- verify whether callers rely on this value."""
    Nsize = DataModel.Notifier.Size()
    if Nsize > 0:
        Logging.info("trying to apply %d change%s" % (Nsize, "s" if Nsize != 1 else ""))
    else:
        Logging.info("no changes to apply")
        return 0
    Nmsg = DataModel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()
    # flush a message every 'maxmsg' notifiers
    maxmsg = 100
    sent = 0
    mcount = 0
    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier_Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    sent += mcount
                    Logging.debug("sending message (%5.1f %%)" % (sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync()
                it.next()
        # NOTE(review): bare except deliberately ends the iteration on
        # any failure (including send errors) -- errors are not reported
        except:
            pass
    finally:
        # flush the remaining partial batch, if any
        if msg.size():
            Logging.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync()
    Logging.info("done")
    return mcount
def _finish(self):
    """Finalize the response: report NODATA or OK to the tracker, log the
    outcome and close the request."""
    if self.written == 0:
        msg = "no waveform data found"
        # NOTE(review): the rendered error page is discarded here (a
        # sibling _finish writes it to the request) -- verify whether
        # this renderErrorPage variant writes to self.req itself
        HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "NODATA", 0, "")
            self.tracker.request_status("END", "")
    else:
        Logging.debug("%s: returned %i bytes of mseed data" % (
            self.ro.service, self.written))
        utils.accessLog(self.req, self.ro, http.OK, self.written, None)
        if self.tracker:
            self.tracker.volume_status("fdsnws", "OK", self.written, "")
            self.tracker.request_status("END", "")
    self.req.unregisterProducer()
    self.req.finish()