def updateObject(self, parentID, arg0):
    """Handle an UPDATE notifier message.

    Only datamodel.Event objects are processed; any other object type is
    ignored. For a matching agency filter the event is forwarded to
    notifyEvent() with newEvent=False.

    parentID -- public ID of the updated object's parent (unused)
    arg0     -- the updated object as delivered by the messaging system
    """
    try:
        obj = datamodel.Event.Cast(arg0)
        if obj:
            org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
            # Guard against a missing origin instead of letting the
            # catch-all handler below dump an AttributeError traceback
            # (consistent with the check done in notifyEvent).
            if org is None:
                logging.warning("unable to get origin %s, ignoring event "
                                "update" % obj.preferredOriginID())
                return
            agencyID = org.creationInfo().agencyID()
            logging.debug("update event '%s'" % obj.publicID())
            # Empty agency filter means "accept all agencies".
            if not self._agencyIDs or agencyID in self._agencyIDs:
                self.notifyEvent(obj, False)
    except BaseException:
        # Never let an exception propagate into the messaging framework;
        # report it on stderr instead.
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
def addObject(self, parentID, arg0):  #pylint: disable=W0622
    """Handle an ADD notifier message.

    Dispatches on the concrete type of arg0: Amplitude (of the configured
    type) triggers notifyAmplitude(), Origin and Magnitude are fed into
    the cache (a PRELIMINARY origin additionally triggers runAlert()),
    and Event is forwarded to notifyEvent() with newEvent=True.

    parentID -- public ID of the new object's parent (unused)
    arg0     -- the added object as delivered by the messaging system
    """
    try:
        obj = datamodel.Amplitude.Cast(arg0)
        if obj:
            if obj.type() == self._ampType:
                logging.debug("got new %s amplitude '%s'"
                              % (self._ampType, obj.publicID()))
                self.notifyAmplitude(obj)
            # An Amplitude cannot be cast to any of the types below, so
            # return early like the other branches do.
            return

        obj = datamodel.Origin.Cast(arg0)
        if obj:
            self._cache.feed(obj)
            logging.debug("got new origin '%s'" % obj.publicID())
            try:
                if obj.evaluationStatus() == datamodel.PRELIMINARY:
                    self.runAlert(obj.latitude().value(),
                                  obj.longitude().value())
            except BaseException:
                # evaluationStatus() raises when the attribute is unset.
                pass
            return

        obj = datamodel.Magnitude.Cast(arg0)
        if obj:
            self._cache.feed(obj)
            logging.debug("got new magnitude '%s'" % obj.publicID())
            return

        obj = datamodel.Event.Cast(arg0)
        if obj:
            org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
            # Guard against a missing origin instead of letting the
            # catch-all handler below dump an AttributeError traceback
            # (consistent with the check done in notifyEvent).
            if org is None:
                logging.warning("unable to get origin %s, ignoring new "
                                "event" % obj.preferredOriginID())
                return
            agencyID = org.creationInfo().agencyID()
            logging.debug("got new event '%s'" % obj.publicID())
            # Empty agency filter means "accept all agencies".
            if not self._agencyIDs or agencyID in self._agencyIDs:
                self.notifyEvent(obj, True)
    except BaseException:
        # Never let an exception propagate into the messaging framework;
        # report it on stderr instead.
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)
def init(self):
    """Initialize the application from command-line options.

    Parses time window, filter and area options into instance attributes.
    Returns True on success, False on a malformed time option or when the
    base class initialization fails.
    """
    if not sc_client.Application.init(self):
        return False

    def optStr(name, default=None):
        # optionString() raises when the option was not given on the
        # command line; fall back to the supplied default in that case.
        try:
            return self.commandline().optionString(name)
        except BaseException:
            return default

    def optList(name):
        # Comma-separated option value -> list of strings, None if absent.
        value = optStr(name)
        return value.split(',') if value is not None else None

    start = optStr("begin", "1900-01-01T00:00:00Z")
    self._startTime = _parseTime(start)
    if self._startTime is None:
        sc_logging.error("Wrong 'begin' format '%s'" % start)
        return False
    sc_logging.debug("Setting start to %s"
                     % self._startTime.toString("%FT%TZ"))

    end = optStr("end", "2500-01-01T00:00:00Z")
    self._endTime = _parseTime(end)
    if self._endTime is None:
        sc_logging.error("Wrong 'end' format '%s'" % end)
        return False
    sc_logging.debug("Setting end to %s" % self._endTime.toString("%FT%TZ"))

    # _modifiedAfterTime is deliberately left unset when the option is
    # absent (matches the original try/except behavior).
    modifiedAfter = optStr("modified-after")
    if modifiedAfter is not None:
        self._modifiedAfterTime = _parseTime(modifiedAfter)
        if self._modifiedAfterTime is None:
            sc_logging.error(
                "Wrong 'modified-after' format '%s'" % modifiedAfter)
            return False
        sc_logging.debug(
            "Setting 'modified-after' time to %s"
            % self._modifiedAfterTime.toString("%FT%TZ"))

    self.evtypes = optStr("ev-type")
    self.orgType = optStr("org-type", "preferred")

    self.simple = self.commandline().hasOption("simple")
    self.manualOnly = self.commandline().hasOption("manual-only")
    self.automaticOnly = self.commandline().hasOption("auto-only")

    self.incAuthor = optList("inc-author")
    self.exclAuthor = optList("excl-author")
    self.incAgencyID = optList("inc-agency")
    self.exclAgencyID = optList("excl-agency")
    self.incMethodID = optList("inc-method")
    self.exclMethodID = optList("excl-method")

    # Bounding box: "minLat,minLon,maxLat,maxLon"; any parse error
    # (missing option, too few tokens, non-float) disables the filter.
    try:
        tokens = self.commandline().optionString("area").split(',')
        Area = namedtuple('Area', 'minLat minLon maxLat maxLon')
        self.area = Area(float(tokens[0]), float(tokens[1]),
                         float(tokens[2]), float(tokens[3]))
    except BaseException:
        self.area = None

    return True
def _processRequest(self, req, ro, dac):
    """Collect, merge, sort and write availability extent lines.

    req -- HTTP request object (twisted-style, checked for disconnect)
    ro  -- request options: merge flags, sort order, row limit, iterators
    dac -- data availability cache the extents are read from

    Returns False if the client disconnected, True otherwise (including
    the empty 204 case).
    """
    if req._disconnected:
        return False

    # tuples: wid, attribute extent, restricted status
    lines = []

    mergeAll = ro.mergeQuality and ro.mergeSampleRate
    mergeNone = not ro.mergeQuality and not ro.mergeSampleRate

    # iterate extents
    for ext, _, restricted in ro.extentIter(dac, self.user, self.access):
        if req._disconnected:
            return False

        # iterate attribute extents and merge them if requested
        if mergeNone:
            for e in ro.attributeExtentIter(ext):
                lines.append((ext, e, restricted))
        elif mergeAll:
            e = self._mergeExtents(ro.attributeExtentIter(ext))
            if e is not None:
                lines.append((ext, e, restricted))
        else:
            # Partial merge: group by sample rate when merging quality,
            # otherwise group by quality, then merge each group.
            if ro.mergeQuality:
                groupKey = lambda e: e.sampleRate()
            else:
                groupKey = lambda e: e.quality()
            eDict = {}
            for e in ro.attributeExtentIter(ext):
                eDict.setdefault(groupKey(e), []).append(e)
            for group in eDict.values():
                lines.append((ext, self._mergeExtents(group), restricted))

    # Return 204 if no matching availability information was found
    if len(lines) == 0:
        msg = "no matching availability information found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return True

    # sort lines
    self._sortLines(lines, ro)

    # truncate lines to requested row limit
    if ro.limit:
        del lines[ro.limit:]

    byteCount, extCount = self._writeLines(req, lines, ro)

    logging.debug("%s: returned %i extents (total bytes: %i)"
                  % (ro.service, extCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def _processRequest(self, req, ro, dac):
    """Collect availability segments and stream them to the client.

    Extent parent OIDs are gathered in batches of 1000 because the
    database IN clause size is limited. Extents whose segment count
    overflowed are either silently skipped (ro.excludeTooLarge) or cause
    the whole request to be rejected with 413.

    req -- HTTP request object (twisted-style, checked for disconnect)
    ro  -- request options
    dac -- data availability cache

    Returns True when segments were written, False on disconnect or any
    error/empty response written before streaming started.
    """
    if req._disconnected:
        return False

    # iterate extents and create IN clauses of parent_oids in bunches
    # of 1000 because the query size is limited
    parentOIDs, idList, tooLarge = [], [], []
    i = 0
    for ext, objID, _ in ro.extentIter(dac, self.user, self.access):
        if req._disconnected:
            return False

        if ro.excludeTooLarge:
            if ext.segmentOverflow():
                continue
        elif ext.segmentOverflow():
            tooLarge.append(ext)
            continue
        elif tooLarge:
            # Request will be rejected anyway; keep collecting only the
            # overflowing extents for the error message.
            continue

        if i < 1000:
            idList.append(objID)
            i += 1
        else:
            parentOIDs.append(idList)
            idList = [objID]
            i = 1

    if not ro.excludeTooLarge and tooLarge:
        extents = ', '.join(
            '{0}.{1}.{2}.{3}'.format(e.waveformID().networkCode(),
                                     e.waveformID().stationCode(),
                                     e.waveformID().locationCode(),
                                     e.waveformID().channelCode())
            for e in tooLarge)

        msg = 'Unable to process request due to database limitations. ' \
              'Some selections have too many segments to process. ' \
              'Rejected extents: {{{0}}}. This limitation may be ' \
              'resolved in a future version of this webservice.' \
              .format(extents)
        self.writeErrorPage(req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro)
        return False

    if len(idList) > 0:
        parentOIDs.append(idList)
    else:
        msg = "no matching availability information found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return False

    db = io.DatabaseInterface.Open(Application.Instance().databaseURI())
    if db is None:
        msg = "could not connect to database"
        # NOTE(review): this uses renderErrorPage while every other error
        # path here uses writeErrorPage — confirm both exist and whether
        # the difference is intentional.
        return self.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    lines = self._lineIter(db, parentOIDs, req, ro, dac.extentsOID())

    byteCount, segCount = self._writeLines(req, lines, ro)

    # Return 204 if no matching availability information was found
    if segCount <= 0:
        msg = "no matching availability information found"
        self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
        return True

    logging.debug("%s: returned %i segments (total bytes: %i)"
                  % (ro.service, segCount, byteCount))
    utils.accessLog(req, ro, http.OK, byteCount, None)
    return True
def notifyEvent(self, evt, newEvent=True):
    """Build a human-readable alert message for an event and, if
    configured, launch the external event script with it.

    evt      -- datamodel.Event to report
    newEvent -- whether this is the first notification for the event;
                may be overridden when _newWhenFirstSeen is set

    Repeated identical messages for the same event are suppressed via
    the _prevMessage cache. Never raises: any error is dumped to stderr.
    """
    try:
        org = self._cache.get(datamodel.Origin, evt.preferredOriginID())
        if not org:
            logging.warning("unable to get origin %s, ignoring event "
                            "message" % evt.preferredOriginID())
            return

        # evaluationStatus() raises when unset; treat that as
        # "not preliminary".
        preliminary = False
        try:
            if org.evaluationStatus() == datamodel.PRELIMINARY:
                preliminary = True
        except BaseException:
            pass

        # A magnitude is required only for non-preliminary events.
        if not preliminary:
            nmag = self._cache.get(datamodel.Magnitude,
                                   evt.preferredMagnitudeID())
            if nmag:
                mag = nmag.magnitude().value()
                mag = "magnitude %.1f" % mag
            else:
                if len(evt.preferredMagnitudeID()) > 0:
                    logging.warning(
                        "unable to get magnitude %s, ignoring event "
                        "message" % evt.preferredMagnitudeID())
                else:
                    logging.warning(
                        "no preferred magnitude yet, ignoring event message"
                    )
                return

        # keep track of old events
        if self._newWhenFirstSeen:
            if evt.publicID() in self._prevMessage:
                newEvent = False
            else:
                newEvent = True

        # Default description: Flinn-Engdahl region name.
        dsc = seismology.Regions.getRegionName(org.latitude().value(),
                                               org.longitude().value())

        # Optionally replace it with the configured pattern, substituting
        # @region@/@dist@/@poi@ from the nearest city; best-effort only.
        if self._eventDescriptionPattern:
            try:
                city, dist, _ = self.nearestCity(
                    org.latitude().value(), org.longitude().value(),
                    self._citiesMaxDist, self._citiesMinPopulation)
                if city:
                    dsc = self._eventDescriptionPattern
                    region = seismology.Regions.getRegionName(
                        org.latitude().value(), org.longitude().value())
                    distStr = str(int(math.deg2km(dist)))
                    dsc = dsc.replace("@region@", region).replace(
                        "@dist@", distStr).replace("@poi@", city.name())
            except BaseException:
                pass

        logging.debug("desc: %s" % dsc)

        dep = org.depth().value()
        now = core.Time.GMT()
        otm = org.time().value()

        # Event age in seconds; rebound below to a human-readable string.
        dt = (now - otm).seconds()

        # if dt > dtmax:
        #     return

        if dt > 3600:
            dt = "%d hours %d minutes ago" % (dt / 3600, (dt % 3600) / 60)
        elif dt > 120:
            dt = "%d minutes ago" % (dt / 60)
        else:
            dt = "%d seconds ago" % dt

        # %%s keeps a literal %s placeholder for the "ago" part, filled
        # in only after the duplicate check below.
        if preliminary:
            message = "earthquake, preliminary, %%s, %s" % dsc
        else:
            message = "earthquake, %%s, %s, %s, depth %d kilometers" % (
                dsc, mag, int(dep + 0.5))

        # at this point the message lacks the "ago" part

        if evt.publicID() in self._prevMessage and \
                self._prevMessage[evt.publicID()] == message:
            logging.info("Suppressing repeated message '%s'" % message)
            return

        self._prevMessage[evt.publicID()] = message
        message = message % dt  # fill the "ago" part

        logging.info(message)

        if not self._eventScript:
            return

        # Do not pile up script invocations: skip while the previous one
        # is still running.
        if self._eventProc is not None:
            if self._eventProc.poll() is None:
                logging.warning(
                    "EventScript still in progress -> skipping message")
                return

        try:
            # Script arguments: message, newEvent flag, event ID,
            # associated phase count, magnitude string.
            param2 = 0
            param3 = 0
            param4 = ""
            if newEvent:
                param2 = 1

            org = self._cache.get(datamodel.Origin,
                                  evt.preferredOriginID())
            if org:
                try:
                    param3 = org.quality().associatedPhaseCount()
                except BaseException:
                    # quality/phase count is optional
                    pass

            nmag = self._cache.get(datamodel.Magnitude,
                                   evt.preferredMagnitudeID())
            if nmag:
                param4 = "%.1f" % nmag.magnitude().value()

            self._eventProc = subprocess.Popen([
                self._eventScript, message,
                "%d" % param2,
                evt.publicID(),
                "%d" % param3,
                param4
            ])
            logging.info("Started event script with pid %d"
                         % self._eventProc.pid)
        except BaseException:
            logging.error(
                "Failed to start event script '%s %s %d %d %s'" % (
                    self._eventScript, message, param2, param3, param4))
    except BaseException:
        # Never let an exception propagate; dump the traceback to stderr.
        info = traceback.format_exception(*sys.exc_info())
        for i in info:
            sys.stderr.write(i)