def run(self):
    """Start the FDSNWS server.

    Blacklists configured users, builds the Twisted site, listens on the
    configured TCP port, installs the SIGHUP handler and periodic reload
    task, then enters the reactor loop.

    Returns True when the reactor ran and terminated normally, False on
    setup failure or exception.
    """
    retn = False
    try:
        for user in self._authBlacklist:
            self._userdb.blacklistUser(user)

        site = self._site()
        if not site:
            return False

        # start listen for incoming request
        self.__tcpPort = reactor.listenTCP(self._port,
                                           site,
                                           self._connections,
                                           self._listenAddress)

        # setup signal handler
        self.__sighup = False
        signal.signal(signal.SIGHUP, self._sighupHandler)
        task.LoopingCall(self._reloadTask).start(60, False)

        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)

        reactor.run()
        retn = True
    except Exception as e:
        # 'as' form is Python 2.6+/3 compatible, matching the rest of the
        # module
        Logging.error(str(e))
    return retn
def onCancel(failure, req):
    """Log why a request was cancelled, then cancel *req* itself."""
    if not failure:
        Logging.error("request canceled")
    else:
        tb = traceback.format_tb(failure.getTracebackObject())
        Logging.error("%s %s" % (failure.getErrorMessage(), tb))
    req.cancel()
def emit(self, record):
    """Forward a Python logging record to the SeisComP logging backend,
    dispatching on the record's level name (unknown levels map to error).
    """
    dispatch = {
        'DEBUG': Logging.debug,
        'INFO': Logging.info,
        'WARNING': Logging.warning,
    }
    logFunc = dispatch.get(record.levelname, Logging.error)
    logFunc(charstar(self.format(record)))
def run(self):
    """Load the stream inventory and open the input file ('-' selects
    stdin).

    Returns False when the input file cannot be opened.
    """
    self.loadStreams()

    try:
        if self.inputFile == '-':
            f = sys.stdin
        else:
            f = open(self.inputFile)
    except IOError as e:
        # 'as' form for Python 2.6+/3 compatibility, consistent with the
        # other run() implementation in this file
        Logging.error(str(e))
        return False
def logSC3(entry):
    """Twisted log observer forwarding reactor log entries to SeisComP
    logging.

    Entries flagged as errors are logged with error severity, everything
    else with info. Malformed entries are ignored (best effort).
    """
    try:
        isError = entry['isError']
        msg = entry['message']
        if isError:
            for l in msg:
                Logging.error("[reactor] %s" % l)
        else:
            for l in msg:
                Logging.info("[reactor] %s" % l)
    except Exception:
        # best effort: never let logging take down the reactor, but do not
        # swallow SystemExit/KeyboardInterrupt like a bare except would
        pass
def onFinish(result, req):
    """Deferred callback fired when request processing completes.

    Logs the outcome and schedules req.finish() on the reactor thread.
    A cancelled request is only logged; finish() is not called for it.
    """
    Logging.debug("finish value = %s" % str(result))
    if not isinstance(result, Failure):
        if result:
            Logging.debug("request successfully served")
        else:
            Logging.debug("request failed")
    else:
        if isinstance(result.value, defer.CancelledError):
            Logging.error("request canceled")
            return
        tb = traceback.format_tb(result.getTracebackObject())
        Logging.error("%s %s" % (result.getErrorMessage(), tb))

    reactor.callFromThread(req.finish)
def get_closest_city(latitude, longitude):
    """Return a human readable location string
    ('a <distance> de <city>, <province>') for the given coordinates,
    querying the configured geolocation service.

    Returns '---' when the service is unreachable or its answer cannot be
    parsed.
    """
    try:
        # Let requests build and percent-encode the query string; the
        # timeout prevents the caller from hanging forever on a stuck
        # geolocation service (the previous code had no timeout at all).
        result = requests.get(
            '%s/get_nearest_city' % cfg.geolocation_service_url,
            params={'lat': latitude,
                    'lon': longitude,
                    'token': cfg.geolocation_service_token},
            timeout=10)
        # Response looks like "(<distance>, '<city>', '<province>')" —
        # TODO confirm against the service; a comma inside a city name
        # would break this split.
        distance, city, province = result.text.strip('()').encode(
            'utf-8', errors='ignore').split(',')
        return 'a %s de %s, %s' % (distance, city.strip(" '"),
                                   province.strip(" '"))
    except Exception as e:
        msg_error = "##Error in get_closest_city:%s" % str(e)
        print(msg_error)
        logging.error(msg_error)
        return '---'
def __getDayRaw(self, day, startt, endt, net, sta, loc, cha, bufferSize):
    """Yield raw miniSEED buffers for one day of the requested stream.

    Missing data files are skipped silently; files raising a miniSEED
    error are logged and skipped.
    """
    # '--' is the placeholder for an empty location code
    if loc == '--':
        loc = ''

    for fileName in self.__getMSName(day, net, sta, loc, cha):
        if not os.path.exists(fileName):
            continue
        try:
            with open(fileName, 'rb') as stream:
                for chunk in self.__getWaveform(startt, endt, stream,
                                                bufferSize):
                    yield chunk
        except mseedlite.MSeedError as err:
            Logging.error("%s: %s" % (fileName, str(err)))
def open_SC3Db(self, dbUrl):
    """Parse a SeisComP3 database URL (driver://user:pass@host/db),
    connect, and return a DataModel.DatabaseQuery bound to the
    connection (also stored in sc3wrap.dbQuery).

    Raises SystemExit if the URL is malformed, the driver is unknown, or
    the connection cannot be established.
    """
    m = re.match(r"(?P<dbDriverName>^.*):\/\/(?P<dbAddress>.+?:.+?@.+?\/.+$)",
                 dbUrl)
    if not m:
        # call form instead of Python-2-only 'raise X, msg' statement
        raise SystemExit("error in parsing SC3 DB url")
    _dbUrl = m.groupdict()

    dbDriver = IO.DatabaseInterface.Create(_dbUrl["dbDriverName"])
    if dbDriver is None:
        Logging.error("Cannot find database driver " +
                      _dbUrl["dbDriverName"])
        raise SystemExit("driver not found")

    if not dbDriver.connect(_dbUrl["dbAddress"]):
        Logging.error("Cannot connect to database at " +
                      _dbUrl["dbAddress"])
        raise SystemExit("connection could not be established")

    # print() with a single argument behaves identically under Python 2
    print("opening destination Database: " + _dbUrl["dbAddress"])

    dbQuery = DataModel.DatabaseQuery(dbDriver)
    sc3wrap.dbQuery = dbQuery
    return dbQuery
def __init__(self, config, name, group):
    """Open a mediator connection and a database connection from *config*.

    Raises ConnectionError when the mediator is unreachable, and
    DatabaseError when the database driver is missing or the database
    connection fails.
    """
    mediatorAddress = config.getString("connection.server")
    dbDriverName = config.getString("database.type")
    dbAddress = config.getString("database.parameters")

    connection = Communication.Connection.Create(mediatorAddress, name,
                                                 group)
    if connection is None:
        Logging.error("Cannot connect to Mediator")
        # call form instead of Python-2-only 'raise X, msg' statement
        raise ConnectionError("connection could not be established")
    else:
        Logging.info("Connection has been established")

    dbDriver = IO.DatabaseInterface.Create(dbDriverName)
    if dbDriver is None:
        Logging.error("Cannot find database driver " + dbDriverName)
        raise DatabaseError("driver not found")

    if not dbDriver.connect(dbAddress):
        Logging.error("Cannot connect to database at " + dbAddress)
        raise DatabaseError("connection could not be established")

    self.__connection = connection
    # This reference to dbDriver is essential, since dbQuery becomes
    # invalid when dbDriver is deleted
    self.__dbDriver = dbDriver
    self.dbQuery = DatabaseQuery(dbDriver)
def run(self):
    """Convert the input file ('-' selects stdin) to a SeisComP3
    EventParameters XML document written to stdout.

    Returns True on success, False when the input cannot be read or
    parsed.
    """
    self.loadStreams()

    try:
        if self.inputFile == '-':
            f = sys.stdin
        else:
            f = open(self.inputFile)
    except IOError as e:
        Logging.error(str(e))
        return False

    try:
        ep = self.sh2proc(f)
    finally:
        # fix file handle leak; never close the caller-owned stdin stream
        if f is not sys.stdin:
            f.close()

    if ep is None:
        return False

    ar = IO.XMLArchive()
    ar.create('-')
    ar.setFormattedOutput(True)
    ar.writeObject(ep)
    ar.close()

    return True
def _filterInventory(self, inv, fileName, serviceName=""):
    # Apply an INI-style filter file to the inventory *inv*; an empty
    # file name means "no filtering", which trivially succeeds.
    # NOTE(review): this chunk appears truncated — 'filter' and
    # 'includeRuleDefined' are initialized but the rule-reading code that
    # uses them is not visible here.
    if not fileName:
        return True

    class FilterRule:
        # One parsed rule from the filter file; a leading '!' in the
        # section name marks an exclude rule.
        def __init__(self, name, code):
            self.name = name
            self.exclude = name.startswith("!")
            self.code = code

            # optional attributes, None means "not constrained"
            self.restricted = None
            self.shared = None
            self.netClass = None
            self.archive = None

    # read filter configuration from INI file
    filter = []
    includeRuleDefined = False
    try:
        import ConfigParser
    except ImportError, ie:
        Logging.error("could not load 'ConfigParser' Python module")
        return False
def getDACache(self):
    """Return the data-availability cache, refreshing it when expired.

    Returns None when data availability is disabled; returns the
    previous (possibly stale) cache if the database cannot be
    (re)connected.
    """
    if not self._daEnabled:
        return None

    now = Core.Time.GMT()
    expired = self._daCache is None or now > self._daCache.validUntil()
    if expired:
        # make sure there is a usable database connection first
        if self.query() is None \
                or not self.query().driver().isConnected():
            dbInt = IO.DatabaseInterface.Open(self.databaseURI())
            if dbInt is None:
                Logging.error('failed to connect to database')
                return self._daCache
            self.setDatabase(dbInt)

        extents = DataModel.DataAvailability()
        self.query().loadDataExtents(extents)
        expiry = now + Core.TimeSpan(self._daCacheDuration, 0)
        self._daCache = DataAvailabilityCache(self, extents, expiry)

    return self._daCache
def init(self):
    """Initialize the application: parse the --begin, --end, --delimiter
    and --modified-after command line options.

    Returns False on a failed base initialization or an invalid time
    format.
    """
    if not Client.Application.init(self):
        return False

    # 'begin' defaults to the dawn of recorded data
    try:
        start = self.commandline().optionString("begin")
    except Exception:
        # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        start = "1900-01-01T00:00:00Z"
    self._startTime = _parseTime(start)
    if self._startTime is None:
        Logging.error("Wrong 'begin' format '%s'" % start)
        return False
    Logging.debug("Setting start to %s" %
                  self._startTime.toString("%FT%TZ"))

    # 'end' defaults to the far future
    try:
        end = self.commandline().optionString("end")
    except Exception:
        end = "2500-01-01T00:00:00Z"
    self._endTime = _parseTime(end)
    if self._endTime is None:
        Logging.error("Wrong 'end' format '%s'" % end)
        return False
    Logging.debug("Setting end to %s" % self._endTime.toString("%FT%TZ"))

    try:
        self._delimiter = self.commandline().optionString("delimiter")
    except Exception:
        self._delimiter = "\n"

    # 'modified-after' is optional and has no default
    try:
        modifiedAfter = self.commandline().optionString("modified-after")
        self._modifiedAfterTime = _parseTime(modifiedAfter)
        if self._modifiedAfterTime is None:
            Logging.error("Wrong 'modified-after' format '%s'" %
                          modifiedAfter)
            return False
        Logging.debug("Setting 'modified-after' time to %s" %
                      self._modifiedAfterTime.toString("%FT%TZ"))
    except Exception:
        pass

    return True
def onRequestError(failure, req):
    """Errback for failed requests: log the failure with its traceback,
    notify twisted.web that processing failed, and pass the failure on.
    """
    tb = traceback.format_tb(failure.getTracebackObject())
    Logging.error("%s %s" % (failure.getErrorMessage(), tb))
    reactor.callFromThread(req.processingFailed, failure)
    return failure
def sh2proc(self, file):
    # Parse a colon-separated key/value event description read from
    # *file* into a DataModel.EventParameters object containing one
    # Event and one Origin with picks, arrivals, amplitudes and
    # magnitudes. Phase blocks are terminated by the literal line
    # '--- End of Phase ---' (matched case-insensitively below).
    # Returns the EventParameters instance, or None on a fatal parse
    # error. NOTE(review): the input is presumably Seismic Handler (SH)
    # output, judging by the method name — confirm against callers.
    ep = DataModel.EventParameters()
    origin = DataModel.Origin.Create()
    event = DataModel.Event.Create()

    origin.setCreationInfo(DataModel.CreationInfo())
    origin.creationInfo().setCreationTime(Core.Time.GMT())

    originQuality = None
    originCE = None
    latFound = False
    lonFound = False
    depthError = None
    originComments = {}

    # variables, reset after 'end of phase'
    pick = None
    stationMag = None
    staCode = None
    compCode = None
    stationMagBB = None

    amplitudeDisp = None
    amplitudeVel = None
    amplitudeSNR = None
    amplitudeBB = None

    magnitudeMB = None
    magnitudeML = None
    magnitudeMS = None
    magnitudeBB = None

    # conversion factor from kilometers to degrees of arc
    km2degFac = 1.0 / Math.deg2km(1.0)

    # read file line by line, split key and value at colon
    iLine = 0
    for line in file:
        iLine += 1
        a = line.split(':', 1)
        key = a[0].strip()
        keyLower = key.lower()
        value = None

        # empty line
        if len(keyLower) == 0:
            continue

        # end of phase
        elif keyLower == '--- end of phase ---':
            if pick is None:
                Logging.warning('Line %i: found empty phase block' % iLine)
                continue

            if staCode is None or compCode is None:
                Logging.warning('Line %i: end of phase, stream code '
                                'incomplete' % iLine)
                continue

            if staCode not in self.streams:
                Logging.warning('Line %i: end of phase, station code %s '
                                'not found in inventory' % (iLine, staCode))
                continue

            if compCode not in self.streams[staCode]:
                Logging.warning('Line %i: end of phase, component %s of '
                                'station %s not found in inventory' % (
                                    iLine, compCode, staCode))
                continue

            streamID = self.streams[staCode][compCode]

            pick.setWaveformID(streamID)
            ep.add(pick)

            arrival.setPickID(pick.publicID())
            arrival.setPhase(phase)
            origin.add(arrival)

            if amplitudeSNR is not None:
                amplitudeSNR.setPickID(pick.publicID())
                amplitudeSNR.setWaveformID(streamID)
                ep.add(amplitudeSNR)

            if amplitudeBB is not None:
                amplitudeBB.setPickID(pick.publicID())
                amplitudeBB.setWaveformID(streamID)
                ep.add(amplitudeBB)

            if stationMagBB is not None:
                stationMagBB.setWaveformID(streamID)
                origin.add(stationMagBB)
                stationMagContrib = \
                    DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMagBB.publicID())
                if magnitudeBB is None:
                    magnitudeBB = DataModel.Magnitude.Create()
                magnitudeBB.add(stationMagContrib)

            if stationMag is not None:
                if stationMag.type() in ['mb', 'ML'] and \
                        amplitudeDisp is not None:
                    amplitudeDisp.setPickID(pick.publicID())
                    amplitudeDisp.setWaveformID(streamID)
                    amplitudeDisp.setPeriod(
                        DataModel.RealQuantity(ampPeriod))
                    amplitudeDisp.setType(stationMag.type())
                    ep.add(amplitudeDisp)

                if stationMag.type() in ['Ms(BB)'] and \
                        amplitudeVel is not None:
                    amplitudeVel.setPickID(pick.publicID())
                    amplitudeVel.setWaveformID(streamID)
                    amplitudeVel.setPeriod(
                        DataModel.RealQuantity(ampPeriod))
                    amplitudeVel.setType(stationMag.type())
                    ep.add(amplitudeVel)

                stationMag.setWaveformID(streamID)
                origin.add(stationMag)

                stationMagContrib = \
                    DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMag.publicID())

                magType = stationMag.type()
                if magType == 'ML':
                    if magnitudeML is None:
                        magnitudeML = DataModel.Magnitude.Create()
                    magnitudeML.add(stationMagContrib)

                elif magType == 'Ms(BB)':
                    if magnitudeMS is None:
                        magnitudeMS = DataModel.Magnitude.Create()
                    magnitudeMS.add(stationMagContrib)

                elif magType == 'mb':
                    if magnitudeMB is None:
                        magnitudeMB = DataModel.Magnitude.Create()
                    magnitudeMB.add(stationMagContrib)

            # reset per-phase state for the next block
            pick = None
            staCode = None
            compCode = None
            stationMag = None
            stationMagBB = None
            amplitudeDisp = None
            amplitudeVel = None
            amplitudeSNR = None
            amplitudeBB = None
            continue

        # empty key
        elif len(a) == 1:
            Logging.warning('Line %i: key without value' % iLine)
            continue

        value = a[1].strip()

        if pick is None:
            pick = DataModel.Pick.Create()
            arrival = DataModel.Arrival()

        try:
            ##############################################################
            # station parameters

            # station code
            if keyLower == 'station code':
                staCode = value

            # pick time
            elif keyLower == 'onset time':
                pick.setTime(
                    DataModel.TimeQuantity(self.parseTime(value)))

            # pick onset type
            elif keyLower == 'onset type':
                found = False
                for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                              DataModel.QUESTIONABLE]:
                    if value == DataModel.EPickOnsetNames_name(onset):
                        pick.setOnset(onset)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported onset value')

            # phase code
            elif keyLower == 'phase name':
                phase = DataModel.Phase()
                phase.setCode(value)
                pick.setPhaseHint(phase)

            # event type
            elif keyLower == 'event type':
                evttype = EventTypes[value]
                event.setType(evttype)
                originComments[key] = value

            # filter ID
            elif keyLower == 'applied filter':
                pick.setFilterID(value)

            # channel code, prepended by configured Channel prefix if only
            # one character is found
            elif keyLower == 'component':
                compCode = value

            # pick evaluation mode
            elif keyLower == 'pick type':
                found = False
                for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                    if value == DataModel.EEvaluationModeNames_name(mode):
                        pick.setEvaluationMode(mode)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported evaluation mode value')

            # pick author
            elif keyLower == 'analyst':
                creationInfo = DataModel.CreationInfo()
                creationInfo.setAuthor(value)
                pick.setCreationInfo(creationInfo)

            # pick polarity
            # isn't tested
            elif keyLower == 'sign':
                if value == 'positive':
                    sign = '0'  # positive
                elif value == 'negative':
                    sign = '1'  # negative
                else:
                    sign = '2'  # unknown
                # NOTE(review): setPolarity is fed a float built from a
                # digit string — presumably matching the enum's numeric
                # values; confirm against the DataModel API
                pick.setPolarity(float(sign))

            # arrival weight
            elif keyLower == 'weight':
                arrival.setWeight(float(value))

            # arrival azimuth
            elif keyLower == 'theo. azimuth (deg)':
                arrival.setAzimuth(float(value))

            # pick theo backazimuth
            elif keyLower == 'theo. backazimuth (deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))
                else:
                    pick.setBackazimuth(
                        DataModel.RealQuantity(float(value)))
                    pick.setSlownessMethodID('theoretical')

            # pick beam slowness
            elif keyLower == 'beam-slowness (sec/deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))
                else:
                    pick.setHorizontalSlowness(
                        DataModel.RealQuantity(float(value)))
                    pick.setSlownessMethodID('Array Beam')

            # pick beam backazimuth
            elif keyLower == 'beam-azimuth (deg)':
                if pick.slownessMethodID() == 'corrected':
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine, key))
                else:
                    pick.setBackazimuth(
                        DataModel.RealQuantity(float(value)))

            # pick epi slowness
            elif keyLower == 'epi-slowness (sec/deg)':
                pick.setHorizontalSlowness(
                    DataModel.RealQuantity(float(value)))
                pick.setSlownessMethodID('corrected')

            # pick epi backazimuth
            elif keyLower == 'epi-azimuth (deg)':
                pick.setBackazimuth(DataModel.RealQuantity(float(value)))

            # arrival distance degree
            elif keyLower == 'distance (deg)':
                arrival.setDistance(float(value))

            # arrival distance km, recalculates for degree
            elif keyLower == 'distance (km)':
                if isinstance(arrival.distance(), float):
                    # NOTE(review): iLine-1 assumes 'distance (deg)' was
                    # on the immediately preceding line — verify
                    Logging.debug('Line %i: ignoring parameter: %s' % (
                        iLine-1, 'distance (deg)'))
                arrival.setDistance(float(value) * km2degFac)

            # arrival time residual
            elif keyLower == 'residual time':
                arrival.setTimeResidual(float(value))

            # amplitude snr
            elif keyLower == 'signal/noise':
                amplitudeSNR = DataModel.Amplitude.Create()
                amplitudeSNR.setType('SNR')
                amplitudeSNR.setAmplitude(
                    DataModel.RealQuantity(float(value)))

            # amplitude period
            elif keyLower.startswith('period'):
                ampPeriod = float(value)

            # amplitude value for displacement
            elif keyLower == 'amplitude (nm)':
                amplitudeDisp = DataModel.Amplitude.Create()
                amplitudeDisp.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeDisp.setUnit('nm')

            # amplitude value for velocity
            elif keyLower.startswith('vel. amplitude'):
                amplitudeVel = DataModel.Amplitude.Create()
                amplitudeVel.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeVel.setUnit('nm/s')

            elif keyLower == 'bb amplitude (nm/sec)':
                amplitudeBB = DataModel.Amplitude.Create()
                amplitudeBB.setAmplitude(
                    DataModel.RealQuantity(float(value)))
                amplitudeBB.setType('mB')
                amplitudeBB.setUnit('nm/s')
                amplitudeBB.setPeriod(
                    DataModel.RealQuantity(ampBBPeriod))

            elif keyLower == 'bb period (sec)':
                ampBBPeriod = float(value)

            elif keyLower == 'broadband magnitude':
                magType = self.parseMagType('bb')
                stationMagBB = DataModel.StationMagnitude.Create()
                stationMagBB.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                stationMagBB.setType(magType)
                stationMagBB.setAmplitudeID(amplitudeBB.publicID())

            # ignored
            elif keyLower == 'quality number':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # station magnitude value and type
            elif keyLower.startswith('magnitude '):
                magType = self.parseMagType(key[10:])
                stationMag = DataModel.StationMagnitude.Create()
                stationMag.setMagnitude(
                    DataModel.RealQuantity(float(value)))

                if len(magType) > 0:
                    stationMag.setType(magType)
                if magType == 'mb':
                    stationMag.setAmplitudeID(amplitudeDisp.publicID())

                elif magType == 'MS(BB)':
                    stationMag.setAmplitudeID(amplitudeVel.publicID())
                else:
                    Logging.debug('Line %i: Magnitude Type not known %s.'
                                  % (iLine, magType))

            ###############################################################
            # origin parameters

            # event ID, added as origin comment later on
            elif keyLower == 'event id':
                originComments[key] = value

            # magnitude value and type
            elif keyLower == 'mean bb magnitude':
                magType = self.parseMagType('bb')
                if magnitudeBB is None:
                    magnitudeBB = DataModel.Magnitude.Create()
                magnitudeBB.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                magnitudeBB.setType(magType)

            elif keyLower.startswith('mean magnitude '):
                magType = self.parseMagType(key[15:])

                if magType == 'ML':
                    if magnitudeML is None:
                        magnitudeML = DataModel.Magnitude.Create()
                    magnitudeML.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeML.setType(magType)

                elif magType == 'Ms(BB)':
                    if magnitudeMS is None:
                        magnitudeMS = DataModel.Magnitude.Create()
                    magnitudeMS.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeMS.setType(magType)

                elif magType == 'mb':
                    if magnitudeMB is None:
                        magnitudeMB = DataModel.Magnitude.Create()
                    magnitudeMB.setMagnitude(
                        DataModel.RealQuantity(float(value)))
                    magnitudeMB.setType(magType)

                else:
                    Logging.warning(
                        'Line %i: Magnitude type %s not defined yet.'
                        % (iLine, magType))

            # latitude
            elif keyLower == 'latitude':
                origin.latitude().setValue(float(value))
                latFound = True
            elif keyLower == 'error in latitude (km)':
                origin.latitude().setUncertainty(float(value))

            # longitude
            elif keyLower == 'longitude':
                origin.longitude().setValue(float(value))
                lonFound = True
            elif keyLower == 'error in longitude (km)':
                origin.longitude().setUncertainty(float(value))

            # depth
            elif keyLower == 'depth (km)':
                origin.setDepth(DataModel.RealQuantity(float(value)))
                if depthError is not None:
                    origin.depth().setUncertainty(depthError)
            elif keyLower == 'depth type':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))
            elif keyLower == 'error in depth (km)':
                depthError = float(value)
                # depth may not be set yet; retry when 'depth (km)' arrives
                try:
                    origin.depth().setUncertainty(depthError)
                except Core.ValueException:
                    pass

            # time
            elif keyLower == 'origin time':
                origin.time().setValue(self.parseTime(value))
            elif keyLower == 'error in origin time':
                origin.time().setUncertainty(float(value))

            # location method
            elif keyLower == 'location method':
                origin.setMethodID(str(value))

            # region table, added as origin comment later on
            elif keyLower == 'region table':
                originComments[key] = value

            # region table, added as origin comment later on
            elif keyLower == 'region id':
                originComments[key] = value

            # source region, added as origin comment later on
            elif keyLower == 'source region':
                originComments[key] = value

            # used station count
            elif keyLower == 'no. of stations used':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setUsedStationCount(int(value))

            # ignored
            elif keyLower == 'reference location name':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # confidence ellipsoid major axis
            elif keyLower == 'error ellipse major':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMajorAxisLength(float(value))

            # confidence ellipsoid minor axis
            elif keyLower == 'error ellipse minor':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMinorAxisLength(float(value))

            # confidence ellipsoid rotation
            elif keyLower == 'error ellipse strike':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setMajorAxisRotation(float(value))

            # azimuthal gap
            elif keyLower == 'max azimuthal gap (deg)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setAzimuthalGap(float(value))

            # creation info author
            elif keyLower == 'author':
                origin.creationInfo().setAuthor(value)

            # creation info agency
            elif keyLower == 'source of information':
                origin.creationInfo().setAgencyID(value)

            # earth model id
            elif keyLower == 'velocity model':
                origin.setEarthModelID(value)

            # standard error
            elif keyLower == 'rms of residuals (sec)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setStandardError(float(value))

            # ignored
            elif keyLower == 'phase flags':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # ignored
            elif keyLower == 'location input params':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # missing keys
            elif keyLower == 'ampl&period source':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            elif keyLower == 'location quality':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            elif keyLower == 'reference latitude':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            elif keyLower == 'reference longitude':
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            elif keyLower.startswith('amplitude time'):
                Logging.debug('Line %i: ignoring parameter: %s' % (
                    iLine, key))

            # unknown key
            else:
                Logging.warning('Line %i: ignoring unknown parameter: %s'
                                % (iLine, key))

        except ValueError as ve:
            Logging.warning('Line %i: can not parse %s value' % (
                iLine, key))
        except Exception:
            Logging.error('Line %i: %s' % (
                iLine, str(traceback.format_exc())))
            return None

    # check
    if not latFound:
        Logging.warning('could not add origin, missing latitude parameter')
    elif not lonFound:
        Logging.warning(
            'could not add origin, missing longitude parameter')
    elif not origin.time().value().valid():
        Logging.warning(
            'could not add origin, missing origin time parameter')
    else:
        if magnitudeMB is not None:
            origin.add(magnitudeMB)
        if magnitudeML is not None:
            origin.add(magnitudeML)
        if magnitudeMS is not None:
            origin.add(magnitudeMS)
        if magnitudeBB is not None:
            origin.add(magnitudeBB)

        ep.add(event)
        ep.add(origin)

        if originQuality is not None:
            origin.setQuality(originQuality)

        if originCE is not None:
            uncertainty = DataModel.OriginUncertainty()
            uncertainty.setConfidenceEllipsoid(originCE)
            origin.setUncertainty(uncertainty)

        for k, v in originComments.items():
            comment = DataModel.Comment()
            comment.setId(k)
            comment.setText(v)
            origin.add(comment)

    return ep
def input(self):
    """Generator yielding raw waveform data for every requested time
    window in self.__tw.

    Uses the fast SDS reader when the URL starts with 'fastsds://' and
    falls back to a SeisComP RecordStream otherwise. Data is delivered
    in chunks of roughly self.__bufferSize bytes; the network code is
    rewritten on the fly whenever the archive network differs from the
    requested one.
    """
    fastsdsPrefix = 'fastsds://'

    if self.__url.startswith(fastsdsPrefix):
        fastsds = SDS(self.__url[len(fastsdsPrefix):])
    else:
        fastsds = None

    for (net, sta, loc, cha, startt, endt, restricted, archNet) in \
            self.__tw:
        if not archNet:
            archNet = net

        size = 0

        if fastsds:
            start = dateutil.parser.parse(
                startt.iso()).replace(tzinfo=None)
            end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

            for data in fastsds.getRawBytes(start, end, archNet, sta,
                                            loc, cha, self.__bufferSize):
                size += len(data)

                if archNet == net:
                    yield data
                else:
                    try:
                        yield self.__override_network(data, net)
                    except Exception as e:
                        # 'as' form for consistency with the rest of the
                        # module
                        Logging.error(
                            "could not override network code: %s" % str(e))
        else:
            rs = RecordStream.Open(self.__url)

            if rs is None:
                Logging.error("could not open record stream")
                break

            rs.addStream(archNet, sta, loc, cha, startt, endt)
            rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
            eof = False

            while not eof:
                data = ""

                # accumulate records until the buffer is full or the
                # stream ends
                while len(data) < self.__bufferSize:
                    try:
                        rec = rsInput.next()
                    except Exception as e:
                        Logging.error("%s" % str(e))
                        eof = True
                        break

                    if rec is None:
                        eof = True
                        break

                    data += rec.raw().str()

                if data:
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception as e:
                            Logging.error(
                                "could not override network code: %s"
                                % str(e))
def onResponseFailure(err, call):
    """Errback fired when delivering the response fails: log the
    cancellation and abort the pending call."""
    Logging.error("response canceled")
    call.cancel()
def _site(self):
    # Build and return the Twisted Site serving the configured FDSNWS
    # services (dataselect, event, station, availability), or None when
    # no service is enabled or inventory filtering fails. Logs the full
    # effective configuration at debug level first.
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(
            self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)
    stationFilterStr = "<None>"
    if self._stationFilter is not None:
        stationFilterStr = self._stationFilter
    dataSelectFilterStr = "<None>"
    if self._dataSelectFilter is not None:
        dataSelectFilterStr = self._dataSelectFilter
    Logging.debug("\n"
                  "configuration read:\n"
                  " serve\n"
                  " dataselect : %s\n"
                  " event : %s\n"
                  " station : %s\n"
                  " availability : %s\n"
                  " listenAddress : %s\n"
                  " port : %i\n"
                  " connections : %i\n"
                  " htpasswd : %s\n"
                  " accessLog : %s\n"
                  " queryObjects : %i\n"
                  " realtimeGap : %s\n"
                  " samples (M) : %s\n"
                  " recordBulkSize : %i\n"
                  " allowRestricted : %s\n"
                  " useArclinkAccess: %s\n"
                  " hideAuthor : %s\n"
                  " evaluationMode : %s\n"
                  " data availability\n"
                  " enabled : %s\n"
                  " cache duration: %i\n"
                  " repo name : %s\n"
                  " dcc name : %s\n"
                  " eventType\n"
                  " whitelist : %s\n"
                  " blacklist : %s\n"
                  " inventory filter\n"
                  " station : %s\n"
                  " dataSelect : %s\n"
                  " debug enabled : %s\n"
                  " trackdb\n"
                  " enabled : %s\n"
                  " defaultUser : %s\n"
                  " auth\n"
                  " enabled : %s\n"
                  " gnupgHome : %s\n"
                  " requestLog : %s\n" % (
                      self._serveDataSelect, self._serveEvent,
                      self._serveStation, self._serveAvailability,
                      self._listenAddress, self._port, self._connections,
                      self._htpasswd, self._accessLogFile,
                      self._queryObjects, self._realtimeGap,
                      self._samplesM, self._recordBulkSize,
                      self._allowRestricted, self._useArclinkAccess,
                      self._hideAuthor, modeStr, self._daEnabled,
                      self._daCacheDuration, self._daRepositoryName,
                      self._daDCCName, whitelistStr, blacklistStr,
                      stationFilterStr, dataSelectFilterStr,
                      self._debugFilter, self._trackdbEnabled,
                      self._trackdbDefaultUser, self._authEnabled,
                      self._authGnupgHome, self._requestLogFile))

    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return None

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # request logger if requested
    if self._requestLogFile:
        # import here, so we don't depend on GeoIP if request log is not needed
        from seiscomp3.fdsnws.reqlog import RequestLog
        self._requestLog = RequestLog(self._requestLogFile)

    # load inventory needed by DataSelect and Station service
    stationInv = dataSelectInv = None
    if self._serveDataSelect or self._serveStation:
        retn = False
        stationInv = dataSelectInv = Inventory.Instance().inventory()
        Logging.info("inventory loaded")

        if self._serveDataSelect and self._serveStation:
            # clone inventory if station and dataSelect filter are distinct
            # else share inventory between both services
            if self._stationFilter != self._dataSelectFilter:
                dataSelectInv = self._cloneInventory(stationInv)
                retn = self._filterInventory(
                    stationInv, self._stationFilter, "station") and \
                    self._filterInventory(
                        dataSelectInv, self._dataSelectFilter,
                        "dataSelect")
            else:
                retn = self._filterInventory(
                    stationInv, self._stationFilter)
        elif self._serveStation:
            retn = self._filterInventory(stationInv, self._stationFilter)
        else:
            retn = self._filterInventory(
                dataSelectInv, self._dataSelectFilter)

        if not retn:
            return None

    self._access = Access()

    if self._serveDataSelect and self._useArclinkAccess:
        self._access.initFromSC3Routing(self.query().loadRouting())

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()

    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # dataselect
    if self._serveDataSelect:
        dataselect = ListingResource(DataSelectVersion)
        prefix.putChild('dataselect', dataselect)
        lstFile = os.path.join(shareDir, 'dataselect.html')
        dataselect1 = DirectoryResource(lstFile, DataSelectVersion)
        dataselect.putChild('1', dataselect1)

        dataselect1.putChild('query', FDSNDataSelect(
            dataSelectInv, self._recordBulkSize))
        msg = 'authorization for restricted time series data required'
        authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version',
                             ServiceVersion(DataSelectVersion))
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource(DataSelectVersion)
        dataselect1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'dataselect-builder.html'))
        fileRes.childNotFound = NoResource(DataSelectVersion)
        dataselect1.putChild('builder', fileRes)

        if self._authEnabled:
            dataselect1.putChild('auth', AuthResource(
                DataSelectVersion, self._authGnupgHome, self._userdb))

    # event
    if self._serveEvent:
        event = ListingResource(EventVersion)
        prefix.putChild('event', event)
        lstFile = os.path.join(shareDir, 'event.html')
        event1 = DirectoryResource(lstFile, EventVersion)
        event.putChild('1', event1)

        event1.putChild('query', FDSNEvent(self._hideAuthor,
                                           self._evaluationMode,
                                           self._eventTypeWhitelist,
                                           self._eventTypeBlacklist,
                                           self._eventFormats))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('contributors', fileRes)
        event1.putChild('version', ServiceVersion(EventVersion))
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(shareDir,
                                           'event-builder.html'))
        fileRes.childNotFound = NoResource(EventVersion)
        event1.putChild('builder', fileRes)

    # station
    if self._serveStation:
        station = ListingResource(StationVersion)
        prefix.putChild('station', station)
        lstFile = os.path.join(shareDir, 'station.html')
        station1 = DirectoryResource(lstFile, StationVersion)
        station.putChild('1', station1)

        station1.putChild('query', FDSNStation(stationInv,
                                               self._allowRestricted,
                                               self._queryObjects,
                                               self._daEnabled))
        station1.putChild('version', ServiceVersion(StationVersion))

        # wadl, optionally filtered
        filterList = [] if self._daEnabled else \
            ['name="matchtimeseries"']
        try:
            fileRes = WADLFilter(os.path.join(shareDir, 'station.wadl'),
                                 filterList)
        except:
            fileRes = NoResource(StationVersion)
        station1.putChild('application.wadl', fileRes)

        fileRes = static.File(os.path.join(
            shareDir, 'station-builder.html'))
        fileRes.childNotFound = NoResource(StationVersion)
        station1.putChild('builder', fileRes)

    # availability
    if self._serveAvailability:

        # create a set of waveformIDs which represent open channels
        if self._serveDataSelect:
            openStreams = set()
            for iNet in xrange(dataSelectInv.networkCount()):
                net = dataSelectInv.network(iNet)
                if utils.isRestricted(net):
                    continue
                for iSta in xrange(net.stationCount()):
                    sta = net.station(iSta)
                    if utils.isRestricted(sta):
                        continue
                    for iLoc in xrange(sta.sensorLocationCount()):
                        loc = sta.sensorLocation(iLoc)
                        for iCha in xrange(loc.streamCount()):
                            cha = loc.stream(iCha)
                            if utils.isRestricted(cha):
                                continue
                            openStreams.add("{0}.{1}.{2}.{3}".format(
                                net.code(), sta.code(), loc.code(),
                                cha.code()))
            self._openStreams = openStreams
        else:
            self._openStreams = None

        ext = ListingResource()
        prefix.putChild('ext', ext)
        availability = ListingResource(AvailabilityVersion)
        ext.putChild('availability', availability)
        lstFile = os.path.join(shareDir, 'availability.html')
        availability1 = DirectoryResource(lstFile, AvailabilityVersion)
        availability.putChild('1', availability1)

        availability1.putChild('extent', AvailabilityExtent())
        availability1.putChild('query', AvailabilityQuery())
        availability1.putChild(
            'version', ServiceVersion(AvailabilityVersion))
        # NOTE(review): 'station.wadl' served as 'availability.wadl' —
        # looks like a copy/paste slip, confirm intended file
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('availability.wadl', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'availability-extent-builder.html'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('builder-extent', fileRes)
        fileRes = static.File(os.path.join(
            shareDir, 'availability-builder.html'))
        fileRes.childNotFound = NoResource(AvailabilityVersion)
        availability1.putChild('builder', fileRes)

    # static files
    fileRes = static.File(os.path.join(shareDir, 'js'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('js', fileRes)

    fileRes = static.File(os.path.join(shareDir, 'css'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('css', fileRes)

    return Site(root)
try: cp = ConfigParser.ConfigParser() Logging.notice("reading inventory filter file: %s" % fileName) cp.readfp(open(fileName, 'r')) if len(cp.sections()) == 0: return True # check for mandatory code attribute for sectionName in cp.sections(): code = "" try: code = cp.get(sectionName, "code") except: Logging.error("missing 'code' attribute in section %s of " \ "inventory filter file %s" % ( sectionName, fileName)) return False rule = FilterRule(sectionName, str(code)) try: rule.restricted = cp.getboolean(sectionName, 'restricted') except: pass try: rule.shared = cp.getboolean(sectionName, 'shared') except: pass try: rule.netClass = str(cp.get(sectionName, 'netClass'))
def run(self):
    """Main service entry point.

    Logs the effective configuration, loads and filters the inventory for
    the Station/DataSelect services, builds the Twisted resource tree under
    /fdsnws/ and runs the reactor until shutdown.

    Returns False on configuration/inventory errors, True only if the
    reactor terminated normally.
    """
    # Render optional configuration values as human-readable strings for
    # the configuration log message below.
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)
    stationFilterStr = "<None>"
    if self._stationFilter is not None:
        stationFilterStr = self._stationFilter
    dataSelectFilterStr = "<None>"
    if self._dataSelectFilter is not None:
        dataSelectFilterStr = self._dataSelectFilter

    # Dump the complete effective configuration on debug level.
    Logging.debug("\n" \
        "configuration read:\n" \
        " serve\n" \
        " dataselect : %s\n" \
        " event : %s\n" \
        " station : %s\n" \
        " listenAddress : %s\n" \
        " port : %i\n" \
        " connections : %i\n" \
        " htpasswd : %s\n" \
        " accessLog : %s\n" \
        " queryObjects : %i\n" \
        " realtimeGap : %s\n" \
        " samples (M) : %s\n" \
        " allowRestricted : %s\n" \
        " useArclinkAccess: %s\n" \
        " hideAuthor : %s\n" \
        " evaluationMode : %s\n" \
        " eventType\n" \
        " whitelist : %s\n" \
        " blacklist : %s\n" \
        " inventory filter\n" \
        " station : %s\n" \
        " dataSelect : %s\n" \
        " debug enabled : %s\n" \
        " trackdb\n" \
        " enabled : %s\n" \
        " defaultUser : %s\n" \
        " auth\n" \
        " enabled : %s\n" \
        " gnupgHome : %s\n" % (
        self._serveDataSelect, self._serveEvent, self._serveStation,
        self._listenAddress, self._port, self._connections,
        self._htpasswd, self._accessLogFile, self._queryObjects,
        self._realtimeGap, self._samplesM, self._allowRestricted,
        self._useArclinkAccess, self._hideAuthor, modeStr,
        whitelistStr, blacklistStr, stationFilterStr,
        dataSelectFilterStr, self._debugFilter, self._trackdbEnabled,
        self._trackdbDefaultUser, self._authEnabled,
        self._authGnupgHome))

    # Nothing to serve: refuse to start.
    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return False

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # load inventory needed by DataSelect and Station service
    stationInv = dataSelectInv = None
    if self._serveDataSelect or self._serveStation:
        retn = False
        stationInv = dataSelectInv = Inventory.Instance().inventory()
        Logging.info("inventory loaded")

        if self._serveDataSelect and self._serveStation:
            # clone inventory if station and dataSelect filter are distinct
            # else share inventory between both services
            if self._stationFilter != self._dataSelectFilter:
                dataSelectInv = self._cloneInventory(stationInv)
                retn = self._filterInventory(
                           stationInv, self._stationFilter, "station") and \
                       self._filterInventory(
                           dataSelectInv, self._dataSelectFilter,
                           "dataSelect")
            else:
                retn = self._filterInventory(stationInv, self._stationFilter)
        elif self._serveStation:
            retn = self._filterInventory(stationInv, self._stationFilter)
        else:
            retn = self._filterInventory(dataSelectInv,
                                         self._dataSelectFilter)

        if not retn:
            return False

    # DataSelect needs the routing table for access control.
    if self._serveDataSelect:
        self._access.initFromSC3Routing(self.query().loadRouting())

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()

    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # right now service version is shared by all services
    serviceVersion = ServiceVersion()

    # dataselect
    if self._serveDataSelect:
        dataselect = ListingResource()
        prefix.putChild('dataselect', dataselect)
        dataselect1 = DirectoryResource(
            os.path.join(shareDir, 'dataselect.html'))
        dataselect.putChild('1', dataselect1)
        dataselect1.putChild('query', FDSNDataSelect(dataSelectInv))
        msg = 'authorization for restricted time series data required'
        # queryauth wraps the same service behind HTTP authentication
        authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'dataselect-builder.html'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('builder', fileRes)
        if self._authEnabled:
            dataselect1.putChild(
                'auth', AuthResource(self._authGnupgHome, self._userdb))

    # event
    if self._serveEvent:
        event = ListingResource()
        prefix.putChild('event', event)
        event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
        event.putChild('1', event1)
        event1.putChild(
            'query', FDSNEvent(self._hideAuthor, self._evaluationMode,
                               self._eventTypeWhitelist,
                               self._eventTypeBlacklist))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('contributors', fileRes)
        event1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource()
        event1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
        fileRes.childNotFound = NoResource()
        event1.putChild('builder', fileRes)

    # station
    if self._serveStation:
        station = ListingResource()
        prefix.putChild('station', station)
        station1 = DirectoryResource(os.path.join(shareDir, 'station.html'))
        station.putChild('1', station1)
        station1.putChild(
            'query', FDSNStation(stationInv, self._allowRestricted,
                                 self._queryObjects))
        station1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource()
        station1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'station-builder.html'))
        fileRes.childNotFound = NoResource()
        station1.putChild('builder', fileRes)

    # static files
    fileRes = static.File(os.path.join(shareDir, 'js'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('js', fileRes)

    fileRes = static.File(os.path.join(shareDir, 'css'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('css', fileRes)

    retn = False
    try:
        # start listen for incoming request
        reactor.listenTCP(self._port, Site(root), self._connections,
                          self._listenAddress)

        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)

        # blocks until the reactor is stopped
        reactor.run()
        retn = True
    except Exception, e:
        Logging.error(str(e))
def run(self):
    """Service entry point (second, near-identical variant in this file).

    Logs the configuration, prepares/filters the inventory, builds the
    /fdsnws/ Twisted resource tree and blocks in reactor.run().
    Returns False on error, True after a clean reactor shutdown.
    """
    # Stringify optional settings for the configuration dump.
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)
    stationFilterStr = "<None>"
    if self._stationFilter is not None:
        stationFilterStr = self._stationFilter
    dataSelectFilterStr = "<None>"
    if self._dataSelectFilter is not None:
        dataSelectFilterStr = self._dataSelectFilter

    # Log the effective configuration.
    Logging.debug("\n" \
        "configuration read:\n" \
        " serve\n" \
        " dataselect : %s\n" \
        " event : %s\n" \
        " station : %s\n" \
        " listenAddress : %s\n" \
        " port : %i\n" \
        " connections : %i\n" \
        " htpasswd : %s\n" \
        " accessLog : %s\n" \
        " queryObjects : %i\n" \
        " realtimeGap : %s\n" \
        " samples (M) : %s\n" \
        " allowRestricted : %s\n" \
        " useArclinkAccess: %s\n" \
        " hideAuthor : %s\n" \
        " evaluationMode : %s\n" \
        " eventType\n" \
        " whitelist : %s\n" \
        " blacklist : %s\n" \
        " inventory filter\n" \
        " station : %s\n" \
        " dataSelect : %s\n" \
        " debug enabled : %s\n" \
        " trackdb\n" \
        " enabled : %s\n" \
        " defaultUser : %s\n" \
        " auth\n" \
        " enabled : %s\n" \
        " gnupgHome : %s\n" % (
        self._serveDataSelect, self._serveEvent, self._serveStation,
        self._listenAddress, self._port, self._connections,
        self._htpasswd, self._accessLogFile, self._queryObjects,
        self._realtimeGap, self._samplesM, self._allowRestricted,
        self._useArclinkAccess, self._hideAuthor, modeStr,
        whitelistStr, blacklistStr, stationFilterStr,
        dataSelectFilterStr, self._debugFilter, self._trackdbEnabled,
        self._trackdbDefaultUser, self._authEnabled,
        self._authGnupgHome))

    # All three services disabled: nothing to do.
    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return False

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # load inventory needed by DataSelect and Station service
    stationInv = dataSelectInv = None
    if self._serveDataSelect or self._serveStation:
        retn = False
        stationInv = dataSelectInv = Inventory.Instance().inventory()
        Logging.info("inventory loaded")

        if self._serveDataSelect and self._serveStation:
            # clone inventory if station and dataSelect filter are distinct
            # else share inventory between both services
            if self._stationFilter != self._dataSelectFilter:
                dataSelectInv = self._cloneInventory(stationInv)
                retn = self._filterInventory(
                           stationInv, self._stationFilter, "station") and \
                       self._filterInventory(
                           dataSelectInv, self._dataSelectFilter,
                           "dataSelect")
            else:
                retn = self._filterInventory(stationInv, self._stationFilter)
        elif self._serveStation:
            retn = self._filterInventory(stationInv, self._stationFilter)
        else:
            retn = self._filterInventory(dataSelectInv,
                                         self._dataSelectFilter)

        if not retn:
            return False

    # Access control for DataSelect is derived from the routing table.
    if self._serveDataSelect:
        self._access.initFromSC3Routing(self.query().loadRouting())

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()

    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # right now service version is shared by all services
    serviceVersion = ServiceVersion()

    # dataselect
    if self._serveDataSelect:
        dataselect = ListingResource()
        prefix.putChild('dataselect', dataselect)
        dataselect1 = DirectoryResource(
            os.path.join(shareDir, 'dataselect.html'))
        dataselect.putChild('1', dataselect1)
        dataselect1.putChild('query', FDSNDataSelect(dataSelectInv))
        msg = 'authorization for restricted time series data required'
        # queryauth serves the same data behind HTTP authentication
        authSession = self._getAuthSessionWrapper(dataSelectInv, msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'dataselect-builder.html'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('builder', fileRes)
        if self._authEnabled:
            dataselect1.putChild(
                'auth', AuthResource(self._authGnupgHome, self._userdb))

    # event
    if self._serveEvent:
        event = ListingResource()
        prefix.putChild('event', event)
        event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
        event.putChild('1', event1)
        event1.putChild(
            'query', FDSNEvent(self._hideAuthor, self._evaluationMode,
                               self._eventTypeWhitelist,
                               self._eventTypeBlacklist))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('contributors', fileRes)
        event1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource()
        event1.putChild('application.wadl', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'event-builder.html'))
        fileRes.childNotFound = NoResource()
        event1.putChild('builder', fileRes)

    # station
    if self._serveStation:
        station = ListingResource()
        prefix.putChild('station', station)
        station1 = DirectoryResource(os.path.join(shareDir, 'station.html'))
        station.putChild('1', station1)
        station1.putChild(
            'query', FDSNStation(stationInv, self._allowRestricted,
                                 self._queryObjects))
        station1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource()
        station1.putChild('application.wadl', fileRes)
        fileRes = static.File(
            os.path.join(shareDir, 'station-builder.html'))
        fileRes.childNotFound = NoResource()
        station1.putChild('builder', fileRes)

    # static files
    fileRes = static.File(os.path.join(shareDir, 'js'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('js', fileRes)

    fileRes = static.File(os.path.join(shareDir, 'css'))
    fileRes.childNotFound = NoResource()
    fileRes.hideInListing = True
    prefix.putChild('css', fileRes)

    retn = False
    try:
        # start listen for incoming request
        reactor.listenTCP(self._port, Site(root), self._connections,
                          self._listenAddress)

        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)

        # blocks until the reactor is stopped
        reactor.run()
        retn = True
    except Exception, e:
        Logging.error(str(e))
def run(self):
    """Service entry point (older, reduced variant: no inventory filter,
    no builder pages, no auth/trackdb support).

    Logs the configuration, loads the inventory, builds the /fdsnws/
    resource tree and blocks in reactor.run(). Returns False on
    configuration error, True after a clean reactor shutdown.
    """
    # Stringify optional settings for the configuration dump.
    modeStr = None
    if self._evaluationMode is not None:
        modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
    whitelistStr = "<None>"
    if self._eventTypeWhitelist is not None:
        whitelistStr = ", ".join(self._eventTypeWhitelist)
    blacklistStr = "<None>"
    if self._eventTypeBlacklist is not None:
        blacklistStr = ", ".join(self._eventTypeBlacklist)

    # Log the effective configuration (notice level in this variant).
    Logging.notice("\n" \
        "configuration read:\n" \
        " serve\n" \
        " dataselect : %s\n" \
        " event : %s\n" \
        " station : %s\n" \
        " listenAddress : %s\n" \
        " port : %i\n" \
        " connections : %i\n" \
        " htpasswd : %s\n" \
        " accessLog : %s\n" \
        " queryObjects : %i\n" \
        " realtimeGap : %s\n" \
        " samples (M) : %s\n" \
        " allowRestricted : %s\n" \
        " hideAuthor : %s\n" \
        " evaluationMode : %s\n" \
        " eventType\n" \
        " whitelist : %s\n" \
        " blacklist : %s\n" % (
        self._serveDataSelect, self._serveEvent, self._serveStation,
        self._listenAddress, self._port, self._connections,
        self._htpasswd, self._accessLogFile, self._queryObjects,
        self._realtimeGap, self._samplesM, self._allowRestricted,
        self._hideAuthor, modeStr, whitelistStr, blacklistStr))

    # All three services disabled: nothing to do.
    if not self._serveDataSelect and not self._serveEvent and \
       not self._serveStation:
        Logging.error("all services disabled through configuration")
        return False

    # access logger if requested
    if self._accessLogFile:
        self._accessLog = Log(self._accessLogFile)

    # load inventory needed by DataSelect and Station service
    if self._serveDataSelect or self._serveStation:
        self._loadInventory()

    DataModel.PublicObject.SetRegistrationEnabled(False)

    shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

    # Overwrite/set mime type of *.wadl and *.xml documents. Instead of
    # using the official types defined in /etc/mime.types 'application/xml'
    # is used as enforced by the FDSNWS spec.
    static.File.contentTypes['.wadl'] = 'application/xml'
    static.File.contentTypes['.xml'] = 'application/xml'

    # create resource tree /fdsnws/...
    root = ListingResource()

    fileName = os.path.join(shareDir, 'favicon.ico')
    fileRes = static.File(fileName, 'image/x-icon')
    fileRes.childNotFound = NoResource()
    fileRes.isLeaf = True
    root.putChild('favicon.ico', fileRes)

    prefix = ListingResource()
    root.putChild('fdsnws', prefix)

    # right now service version is shared by all services
    serviceVersion = ServiceVersion()

    # dataselect
    if self._serveDataSelect:
        dataselect = ListingResource()
        prefix.putChild('dataselect', dataselect)
        dataselect1 = DirectoryResource(
            os.path.join(shareDir, 'dataselect.html'))
        dataselect.putChild('1', dataselect1)
        dataselect1.putChild('query', FDSNDataSelect())
        msg = 'authorization for restricted time series data required'
        # queryauth serves the same data behind HTTP authentication
        authSession = self._getAuthSessionWrapper(FDSNDataSelectRealm(), msg)
        dataselect1.putChild('queryauth', authSession)
        dataselect1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
        fileRes.childNotFound = NoResource()
        dataselect1.putChild('application.wadl', fileRes)

    # event
    if self._serveEvent:
        event = ListingResource()
        prefix.putChild('event', event)
        event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
        event.putChild('1', event1)
        event1.putChild(
            'query', FDSNEvent(self._hideAuthor, self._evaluationMode,
                               self._eventTypeWhitelist,
                               self._eventTypeBlacklist))
        fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('catalogs', fileRes)
        fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
        fileRes.childNotFound = NoResource()
        event1.putChild('contributors', fileRes)
        event1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
        fileRes.childNotFound = NoResource()
        event1.putChild('application.wadl', fileRes)

    # station
    if self._serveStation:
        station = ListingResource()
        prefix.putChild('station', station)
        station1 = DirectoryResource(os.path.join(shareDir, 'station.html'))
        station.putChild('1', station1)
        station1.putChild(
            'query', FDSNStation(self._inv, self._allowRestricted,
                                 self._queryObjects))
        station1.putChild('version', serviceVersion)
        fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
        fileRes.childNotFound = NoResource()
        station1.putChild('application.wadl', fileRes)

    retn = False
    try:
        # start listen for incoming request
        reactor.listenTCP(self._port, Site(root), self._connections,
                          self._listenAddress)

        # start processing
        Logging.info("start listening")
        log.addObserver(logSC3)

        # blocks until the reactor is stopped
        reactor.run()
        retn = True
    except Exception, e:
        Logging.error(str(e))
def sh2proc(self, file):
    """Parse a Seismic Handler (SH) event file into SeisComP3 objects.

    Reads *file* line by line; each line is a 'key : value' pair. Pick,
    arrival, amplitude and station-magnitude parameters are collected per
    phase block and flushed into the EventParameters/Origin objects when
    the '--- end of phase ---' marker is seen. Origin-level parameters
    (location, time, quality, confidence ellipsoid, comments) are applied
    directly.

    Returns None if a line cannot be processed with an unexpected error;
    lines with unparsable values or unknown keys are logged and skipped.
    (The visible block ends inside the parsing loop; post-loop assembly,
    if any, is outside this view.)
    """
    ep = DataModel.EventParameters()
    magnitude = DataModel.Magnitude.Create()
    origin = DataModel.Origin.Create()
    origin.setCreationInfo(DataModel.CreationInfo())
    origin.creationInfo().setCreationTime(Core.Time.GMT())

    originQuality = None
    originCE = None
    latFound = False
    lonFound = False
    depthError = None
    originComments = {}

    # phase variables, reset after 'end of phase'
    pick = None
    stationMag = None
    staCode = None
    compCode = None

    # read file line by line, split key and value at colon
    iLine = 0
    for line in file:
        iLine += 1
        a = line.split(':', 1)
        key = a[0].strip()
        keyLower = key.lower()
        value = None

        # empty line
        if len(keyLower) == 0:
            continue

        # end of phase: flush collected pick/arrival/amplitude objects
        elif keyLower == '--- end of phase ---':
            if pick is None:
                Logging.warning('Line %i: found empty phase block' % iLine)
                continue

            if staCode is None or compCode is None:
                Logging.warning('Line %i: end of phase, stream code ' \
                                'incomplete' % iLine)
                continue

            # 'in' test instead of the removed-in-Py3 dict.has_key()
            if staCode not in self.streams:
                Logging.warning('Line %i: end of phase, station code %s ' \
                                'not found in inventory' % (iLine, staCode))
                continue

            if compCode not in self.streams[staCode]:
                Logging.warning('Line %i: end of phase, component %s of ' \
                                'station %s not found in inventory' % (
                                    iLine, compCode, staCode))
                continue

            streamID = self.streams[staCode][compCode]

            pick.setWaveformID(streamID)
            ep.add(pick)

            arrival.setPickID(pick.publicID())
            origin.add(arrival)

            amplitude.setPickID(pick.publicID())
            ep.add(amplitude)

            if stationMag is not None:
                stationMag.setWaveformID(streamID)
                origin.add(stationMag)

                stationMagContrib = \
                    DataModel.StationMagnitudeContribution()
                stationMagContrib.setStationMagnitudeID(
                    stationMag.publicID())
                magnitude.add(stationMagContrib)

            pick = None
            staCode = None
            compCode = None
            stationMag = None
            continue

        # empty key
        elif len(a) == 1:
            Logging.warning('Line %i: key without value' % iLine)
            continue

        value = a[1].strip()

        # lazily create the per-phase objects on the first parameter line
        if pick is None:
            pick = DataModel.Pick.Create()
            arrival = DataModel.Arrival()
            amplitude = DataModel.Amplitude.Create()

        try:
            ##############################################################
            # station parameters

            # station code
            if keyLower == 'station code':
                staCode = value

            # pick time
            elif keyLower == 'onset time':
                pick.setTime(DataModel.TimeQuantity(self.parseTime(value)))

            # pick onset type
            elif keyLower == 'onset type':
                found = False
                for onset in [DataModel.EMERGENT, DataModel.IMPULSIVE,
                              DataModel.QUESTIONABLE]:
                    if value == DataModel.EPickOnsetNames_name(onset):
                        pick.setOnset(onset)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported onset value')

            # phase code
            elif keyLower == 'phase name':
                phase = DataModel.Phase()
                phase.setCode(value)
                pick.setPhaseHint(phase)
                arrival.setPhase(phase)

            # event type, added as origin comment later on
            elif keyLower == 'event type':
                originComments[key] = value

            # filter ID
            elif keyLower == 'applied filter':
                pick.setFilterID(value)

            # channel code, prepended by configured Channel prefix if only
            # one character is found
            elif keyLower == 'component':
                compCode = value

            # pick evaluation mode
            elif keyLower == 'pick type':
                found = False
                for mode in [DataModel.AUTOMATIC, DataModel.MANUAL]:
                    if value == DataModel.EEvaluationModeNames_name(mode):
                        pick.setEvaluationMode(mode)
                        found = True
                        break
                if not found:
                    raise Exception('Unsupported evaluation mode value')

            # arrival weight
            elif keyLower == 'weight':
                arrival.setWeight(float(value))

            # arrival azimuth
            elif keyLower == 'theo. azimuth (deg)':
                arrival.setAzimuth(float(value))

            # arrival backazimuth
            elif keyLower == 'theo. backazimuth (deg)':
                pick.setBackazimuth(DataModel.RealQuantity(float(value)))

            # arrival distance
            elif keyLower == 'distance (deg)':
                arrival.setDistance(float(value))

            # ignored
            elif keyLower == 'distance (km)':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # arrival time residual
            elif keyLower == 'residual time':
                arrival.setTimeResidual(float(value))

            # ignored
            elif keyLower == 'quality number':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # station magnitude value and type
            elif keyLower.startswith('magnitude '):
                stationMag = DataModel.StationMagnitude.Create()
                stationMag.setAmplitudeID(amplitude.publicID())
                stationMag.setMagnitude(
                    DataModel.RealQuantity(float(value)))
                magType = self.parseMagType(key[10:])
                if len(magType) > 0:
                    stationMag.setType(magType)
                    amplitude.setType(magType)

            ###############################################################
            # origin parameters

            # event ID, added as origin comment later on
            elif keyLower == 'event id':
                originComments[key] = value

            # magnitude value and type
            elif keyLower.startswith('mean magnitude '):
                magnitude.setMagnitude(DataModel.RealQuantity(
                    float(value)))
                magType = self.parseMagType(key[15:])
                if len(magType) > 0:
                    magnitude.setType(magType)

            # latitude
            elif keyLower == 'latitude':
                origin.latitude().setValue(float(value))
                latFound = True
            elif keyLower == 'error in latitude (km)':
                origin.latitude().setUncertainty(float(value))

            # longitude
            elif keyLower == 'longitude':
                origin.longitude().setValue(float(value))
                lonFound = True
            elif keyLower == 'error in longitude (km)':
                origin.longitude().setUncertainty(float(value))

            # depth
            elif keyLower == 'depth (km)':
                origin.setDepth(DataModel.RealQuantity(float(value)))
                # depth error may arrive before the depth itself
                if depthError is not None:
                    origin.depth().setUncertainty(depthError)
            elif keyLower == 'depth type':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))
            elif keyLower == 'error in depth (km)':
                depthError = float(value)
                try:
                    origin.depth().setUncertainty(depthError)
                except Core.ValueException:
                    # depth not set yet; uncertainty applied above once
                    # 'depth (km)' is read
                    pass

            # time
            elif keyLower == 'origin time':
                origin.time().setValue(self.parseTime(value))
            elif keyLower == 'error in origin time':
                origin.time().setUncertainty(float(value))

            # region table, added as origin comment later on
            elif keyLower == 'region table':
                originComments[key] = value

            # region table, added as origin comment later on
            elif keyLower == 'region id':
                originComments[key] = value

            # source region, added as origin comment later on
            elif keyLower == 'source region':
                originComments[key] = value

            # used station count
            elif keyLower == 'no. of stations used':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setUsedStationCount(int(value))

            # ignored
            elif keyLower == 'reference location name':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # confidence ellipsoid major axis
            elif keyLower == 'error ellipse major':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMajorAxisLength(float(value))

            # confidence ellipsoid minor axis
            elif keyLower == 'error ellipse minor':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setSemiMinorAxisLength(float(value))

            # confidence ellipsoid rotation
            elif keyLower == 'error ellipse strike':
                if originCE is None:
                    originCE = DataModel.ConfidenceEllipsoid()
                originCE.setMajorAxisRotation(float(value))

            # azimuthal gap
            elif keyLower == 'max azimuthal gap (deg)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setAzimuthalGap(float(value))

            # creation info author
            elif keyLower == 'author':
                origin.creationInfo().setAuthor(value)

            # creation info agency
            elif keyLower == 'agency':
                origin.creationInfo().setAgencyID(value)

            # earth model id
            elif keyLower == 'velocity model':
                origin.setEarthModelID(value)

            # standard error
            elif keyLower == 'rms of residuals (sec)':
                if originQuality is None:
                    originQuality = DataModel.OriginQuality()
                originQuality.setStandardError(float(value))

            # ignored
            elif keyLower == 'phase flags':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # ignored
            elif keyLower == 'location input params':
                Logging.debug('Line %i: ignoring parameter: %s'
                              % (iLine, key))

            # unknown key
            else:
                Logging.warning('Line %i: ignoring unknown parameter: %s' \
                                % (iLine, key))

        except ValueError:
            # value could not be converted (e.g. float()/int() failed)
            Logging.warning('Line %i: can not parse %s value'
                            % (iLine, key))
        except Exception:
            Logging.error('Line %i: %s' % (iLine,
                                           str(traceback.format_exc())))
            return None
def input(self):
    """Generator yielding raw waveform data chunks for every requested
    time window in self.__tw.

    For 'fastsds://' URLs the data is read directly from the SDS archive;
    otherwise a SeisComP RecordStream is opened and records are
    accumulated into chunks of at least self.__bufferSize bytes. When the
    archive network code differs from the requested one, the network code
    in the data is rewritten via self.__override_network.
    """
    fastsdsPrefix = 'fastsds://'

    if self.__url.startswith(fastsdsPrefix):
        fastsds = SDS(self.__url[len(fastsdsPrefix):])
    else:
        fastsds = None

    for (net, sta, loc, cha, startt, endt, restricted, archNet) in self.__tw:
        if not archNet:
            archNet = net

        size = 0

        if fastsds:
            # convert SeisComP times to naive datetimes for the SDS API
            start = dateutil.parser.parse(
                startt.iso()).replace(tzinfo=None)
            end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

            for data in fastsds.getRawBytes(start, end, archNet, sta, loc,
                                            cha, self.__bufferSize):
                size += len(data)

                if archNet == net:
                    yield data
                else:
                    try:
                        yield self.__override_network(data, net)
                    except Exception as e:
                        Logging.error(
                            "could not override network code: %s" % str(e))
        else:
            rs = RecordStream.Open(self.__url)

            if rs is None:
                Logging.error("could not open record stream")
                break

            rs.addStream(archNet, sta, loc, cha, startt, endt)
            rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
            eof = False

            while not eof:
                data = ""

                # accumulate raw records until the buffer is full or the
                # stream is exhausted
                while len(data) < self.__bufferSize:
                    try:
                        rec = rsInput.next()
                    except Exception as e:
                        Logging.error("%s" % str(e))
                        eof = True
                        break

                    if rec is None:
                        eof = True
                        break

                    data += rec.raw().str()

                if data:
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception as e:
                            Logging.error(
                                "could not override network code: %s"
                                % str(e))
def __getWaveform(self, startt, endt, msFile, bufferSize):
    """Generator yielding raw miniSEED data from *msFile* covering the
    [startt, endt) window.

    Locates the record range via binary search (self.__time2recno), then
    streams it in chunks of ~bufferSize bytes, trimming leading records
    that end before startt and stopping at the first record starting at
    or after endt. Assumes fixed-size records and a chronologically
    ordered file; out-of-order timestamps are reported as overlaps.
    """
    if startt >= endt:
        return

    # first record determines the record length and the file start time
    rec = mseedlite.Record(msFile)
    reclen = rec.size
    recStart = 0
    timeStart = rec.begin_time

    if rec.begin_time >= endt:
        return

    # read the last record to get the file end time and record count
    msFile.seek(-reclen, 2)
    rec = mseedlite.Record(msFile)
    recEnd = msFile.tell() / reclen - 1
    timeEnd = rec.begin_time

    if rec.end_time <= startt:
        return

    if timeStart >= timeEnd:
        Logging.error("%s: overlap detected (start=%s, end=%s)" %
                      (msFile.name, timeStart, timeEnd))
        return

    # binary-search the record numbers bounding the requested window
    (lower, et1) = self.__time2recno(msFile, reclen, timeStart, recStart,
                                     timeEnd, recEnd, startt)
    (upper, et2) = self.__time2recno(msFile, reclen, startt, lower,
                                     timeEnd, recEnd, endt)

    if upper < lower:
        Logging.error("%s: overlap detected (lower=%d, upper=%d)" %
                      (msFile.name, lower, upper))
        upper = lower

    msFile.seek(lower * reclen)
    remaining = (upper - lower + 1) * reclen
    # only the first chunk needs per-record trimming at the front
    check = True

    # round the buffer size up to a whole number of records
    if bufferSize % reclen:
        bufferSize += reclen - bufferSize % reclen

    while remaining > 0:
        size = min(remaining, bufferSize)
        data = msFile.read(size)
        remaining -= size
        offset = 0

        if not data:
            return

        if check:
            # skip leading records that end before the requested start;
            # abort if the window is already past
            while offset < len(data):
                rec = mseedlite.Record(data[offset:offset+reclen])

                if rec.begin_time >= endt:
                    return

                if rec.end_time > startt:
                    break

                offset += reclen

            check = False

        if offset < len(data):
            yield data[offset:] if offset else data

    # continue past the computed range record by record until a record
    # starting at or after endt is found
    while True:
        data = msFile.read(reclen)

        if not data:
            return

        rec = mseedlite.Record(data)

        if rec.begin_time >= endt:
            return

        yield data
try: cp = ConfigParser.ConfigParser() Logging.notice("reading inventory filter file: %s" % fileName) cp.readfp(open(fileName, 'r')) if len(cp.sections()) == 0: return True # check for mandatory code attribute for sectionName in cp.sections(): code = "" try: code = cp.get(sectionName, "code") except: Logging.error("missing 'code' attribute in section %s of " \ "inventory filter file %s" % ( sectionName, fileName)) return False rule = FilterRule(sectionName, str(code)) try: rule.restricted = cp.getboolean(sectionName, 'restricted') except: pass try: rule.shared = cp.getboolean(sectionName, 'shared') except: pass
def input(self):
    """Generator yielding raw waveform data chunks for every requested
    time window in self.__tw (newer variant with request tracking).

    For 'fastsds://' URLs data is read directly from the SDS archive;
    otherwise a SeisComP RecordStream is used and raw records are batched
    into chunks of at least self.__bufferSize bytes. Differing archive
    network codes are rewritten via self.__override_network. After each
    window, every tracker in self.__trackerList receives an OK/NODATA
    line-status report with the number of bytes delivered.
    """
    fastsdsPrefix = 'fastsds://'

    if self.__url.startswith(fastsdsPrefix):
        fastsds = SDS(self.__url[len(fastsdsPrefix):])
    else:
        fastsds = None

    for (net, sta, loc, cha, startt, endt, restricted, archNet) in self.__tw:
        if not archNet:
            archNet = net

        size = 0

        if fastsds:
            # convert SeisComP times to naive datetimes for the SDS API
            start = dateutil.parser.parse(
                startt.iso()).replace(tzinfo=None)
            end = dateutil.parser.parse(endt.iso()).replace(tzinfo=None)

            for data in fastsds.getRawBytes(start, end, archNet, sta, loc,
                                            cha, self.__bufferSize):
                size += len(data)

                if archNet == net:
                    yield data
                else:
                    try:
                        yield self.__override_network(data, net)
                    except Exception as e:
                        Logging.error(
                            "could not override network code: %s" % str(e))
        else:
            rs = RecordStream.Open(self.__url)

            if rs is None:
                Logging.error("could not open record stream")
                break

            rs.addStream(archNet, sta, loc, cha, startt, endt)
            rsInput = RecordInput(rs, Array.INT, Record.SAVE_RAW)
            eof = False

            while not eof:
                data = b""

                # accumulate raw records until the buffer is full or the
                # stream is exhausted
                while len(data) < self.__bufferSize:
                    try:
                        rec = rsInput.next()
                    except Exception as e:
                        Logging.error("%s" % str(e))
                        eof = True
                        break

                    if rec is None:
                        eof = True
                        break

                    data += rec.raw().str()

                if data:
                    size += len(data)

                    if archNet == net:
                        yield data
                    else:
                        try:
                            yield self.__override_network(data, net)
                        except Exception as e:
                            Logging.error("could not override network " \
                                          "code: %s" % str(e))

        # report the outcome of this time window to all trackers;
        # net_class heuristic: digits/X/Y/Z prefixes denote temporary
        # networks ('t'), everything else permanent ('p')
        for tracker in self.__trackerList:
            net_class = 't' if net[0] in "0123456789XYZ" else 'p'

            if size == 0:
                tracker.line_status(startt, endt, net, sta, cha, loc,
                                    restricted, net_class, True, [],
                                    "fdsnws", "NODATA", 0, "")
            else:
                tracker.line_status(startt, endt, net, sta, cha, loc,
                                    restricted, net_class, True, [],
                                    "fdsnws", "OK", size, "")
def importFocalMechanism(self, eventID, fm):
    """Import a focal mechanism / moment tensor for an existing event.

    Loads the event identified by eventID, clones its preferred origin,
    attaches an Mw magnitude (if fm provides one), builds a
    FocalMechanism with up to two nodal planes taken from fm, creates a
    MomentTensor referencing the cloned origin, and sends the resulting
    notifier messages to the FOCMECH and LOCATION groups (unless the
    --test command-line option is set).

    Parameters:
        eventID -- publicID of the target event in the database
        fm      -- object carrying the mechanism; optional attributes
                   str1/dip1/rak1, str2/dip2/rak2 (nodal planes) and Mw

    Returns:
        True on success, False if the event or its preferred origin
        cannot be loaded.
    """
    now = Core.Time.GMT()

    # creation info shared by every object created below
    crea = DataModel.CreationInfo()
    crea.setAuthor("MT import script")
    crea.setAgencyID("TEST")
    crea.setCreationTime(now)
    crea.setModificationTime(now)

    event = self.query().loadObject(DataModel.Event.TypeInfo(), eventID)
    event = DataModel.Event.Cast(event)
    if event is None:
        Logging.error("unknown event '%s'" % eventID)
        return False

    originID = event.preferredOriginID()
    origin = self.query().loadObject(DataModel.Origin.TypeInfo(), originID)
    origin = DataModel.Origin.Cast(origin)
    if not origin:
        Logging.error("origin '%s' not loaded" % originID)
        return False

    # clone origin to attach Mw to it
    publicID = "MT#Origin#"+origin.publicID()
    origin = DataModel.Origin.Cast(origin.clone())
    origin.setPublicID(publicID)
    origin.setCreationInfo(crea)

    # attach an Mw magnitude to the cloned origin, if available
    magnitude = None
    if fm.Mw:
        magnitude = DataModel.Magnitude.Create()
        magnitude.setCreationInfo(crea)
        magnitude.setStationCount(0)
        magnitude.setMagnitude(DataModel.RealQuantity(fm.Mw))
        magnitude.setType("Mw")
        origin.add(magnitude)

    # create and populate a focal mechanism
    focmecID = "FM#"+eventID+now.toString("#%Y%m%d.%H%M%S.%f000000")[:20]
    focmec = DataModel.FocalMechanism.Create(focmecID)
    focmec.setTriggeringOriginID(originID)

    # each nodal plane is optional: if fm lacks any of the three angles
    # the plane is discarded entirely
    try:
        np1 = DataModel.NodalPlane()
        np1.setStrike( DataModel.RealQuantity(fm.str1) )
        np1.setDip( DataModel.RealQuantity(fm.dip1) )
        np1.setRake( DataModel.RealQuantity(fm.rak1) )
    except AttributeError:
        np1 = None

    try:
        np2 = DataModel.NodalPlane()
        np2.setStrike( DataModel.RealQuantity(fm.str2) )
        np2.setDip( DataModel.RealQuantity(fm.dip2) )
        np2.setRake( DataModel.RealQuantity(fm.rak2) )
    except AttributeError:
        # BUGFIX: was 'npr21 = None' (typo), which left a partially
        # populated np2 attached below; mirror the np1 handler instead
        np2 = None

    np = DataModel.NodalPlanes()
    if np1:
        np.setNodalPlane1(np1)
    if np2:
        np.setNodalPlane2(np2)

    focmec.setNodalPlanes(np)
    focmec.setCreationInfo(crea)
    focmec.setEvaluationStatus(DataModel.REVIEWED)
    focmec.setEvaluationMode(DataModel.MANUAL)

    # create moment tensor and populate it with (just) Mw
    momtenID = "MT#"+eventID+now.toString("#%Y%m%d.%H%M%S.%f000000")[:20]
    momten = DataModel.MomentTensor.Create(momtenID)
    momten = DataModel.MomentTensor.Cast(momten)
    momten.setDerivedOriginID(origin.publicID())
    if fm.Mw:
        momten.setMomentMagnitudeID(magnitude.publicID())
    momten.setCreationInfo(crea)
    # Obviously we could populate the entire moment tensor
    # elements, but we don't here to keep things simple and
    # because it's just for demo purposes.
    focmec.add(momten)

    # add the created objects to the EventParameters
    # then retrieve and send corresponding notifier messages
    ep = DataModel.EventParameters()

    DataModel.Notifier.Enable()
    ep.add(focmec)
    msg = DataModel.Notifier.GetMessage()
    if msg and not self.commandline().hasOption("test"):
        if not self.connection().send("FOCMECH", msg):
            sys.stderr.write("Failed to send focmec %s\n" % focmecID)
    DataModel.Notifier.Disable()

    DataModel.Notifier.Enable()
    ep.add(origin)
    msg = DataModel.Notifier.GetMessage()
    if msg and not self.commandline().hasOption("test"):
        if not self.connection().send("LOCATION", msg):
            sys.stderr.write("Failed to send origin %s\n" % originID)
    DataModel.Notifier.Disable()

    return True