def _processRequest(self, req, ro, dbq, exp):
    """Worker for the event service; runs in a thread (see render_GET).

    req -- Twisted request object (checked for client disconnect)
    ro  -- parsed request options (eventIDs, format, ...)
    dbq -- DataModel.DatabaseQuery used to fetch events/origins
    exp -- exporter instance, or None for plain-text output
    Returns False on abort, True when a response was written.
    """
    if req._disconnected:
        return False
    # Registration is disabled so objects read from the database are not
    # registered in the global PublicObject pool.
    DataModel.PublicObject.SetRegistrationEnabled(False)

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        # explicit list of event IDs requested
        for eID in ro.eventIDs:
            obj = dbq.getEventByPublicID(eID)
            e = DataModel.Event.Cast(obj)
            if not e:
                continue

            # apply event type white-/blacklist filtering, if configured
            if self._eventTypeWhitelist or self._eventTypeBlacklist:
                eType = None
                try:
                    # raises ValueError if the event has no type set
                    eType = DataModel.EEventTypeNames_name(e.type())
                except ValueError:
                    pass
                if self._eventTypeWhitelist and \
                        not eType in self._eventTypeWhitelist:
                    continue
                if self._eventTypeBlacklist and \
                        eType in self._eventTypeBlacklist:
                    continue

            # optionally restrict to events whose preferred origin has a
            # certain evaluation mode
            if self._evaluationMode is not None:
                obj = dbq.getObject(DataModel.Origin.TypeInfo(),
                                    e.preferredOriginID())
                o = DataModel.Origin.Cast(obj)
                try:
                    if o is None or \
                            o.evaluationMode() != self._evaluationMode:
                        continue
                except ValueError:
                    continue

            ep.add(e)
    else:
        # no explicit IDs: search events matching the request options
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "no matching events found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    Logging.debug("events found: %i" % ep.eventCount())

    # CSV and plain-text output use text/plain, everything else XML
    if ro.format == 'csv' or not exp:
        req.setHeader('Content-Type', 'text/plain')
    else:
        req.setHeader('Content-Type', 'application/xml')

    if exp:
        return self._processRequestExp(req, ro, dbq, exp, ep)

    return self._processRequestText(req, ro, dbq, ep)
def addArrival(self, parent, pickID, azimuth, distance):
    """Create a preliminary P-phase Arrival, attach it to *parent* and
    return it.

    pickID   -- public ID of the associated pick
    azimuth  -- azimuth in degrees (converted to float)
    distance -- epicentral distance in km (converted to degrees)
    """
    arr = scdatamodel.Arrival()
    arr.setPickID(pickID)
    arr.setPhase(scdatamodel.Phase('P'))
    arr.setTimeUsed(True)
    arr.setPreliminary(True)
    arr.setWeight(1.0)
    arr.setTimeResidual(0.001)
    arr.setAzimuth(float(azimuth))
    arr.setDistance(km2deg(float(distance)))
    parent.add(arr)
    return arr
def line_status(self, start_time, end_time, network, station, channel,
                location, restricted, net_class, shared, constraints,
                volume, status, size, message):
    """Record one Arclink request line together with its status line.

    Missing stream components are normalized to ".", missing volume to
    "NODATA", missing size to 0 and missing message to "". Also updates
    the aggregated counters (averageTimeWindow, totalLineCount,
    okLineCount).
    """
    if network is None or network == "":
        network = "."
    if station is None or station == "":
        station = "."
    if channel is None or channel == "":
        channel = "."
    if location is None or location == "":
        location = "."
    if volume is None:
        volume = "NODATA"
    if size is None:
        size = 0
    if message is None:
        message = ""
    if isinstance(constraints, list):
        constr = " ".join(constraints)
    else:
        # NOTE(review): dict.iteritems() is Python-2-only; under Python 3
        # this branch would raise AttributeError — confirm interpreter.
        constr = " ".join(
            [a + "=" + b for (a, b) in constraints.iteritems()])
    arclinkRequestLine = DataModel.ArclinkRequestLine()
    arclinkRequestLine.setStart(start_time)
    arclinkRequestLine.setEnd(end_time)
    # stream components are truncated to the 8-character schema limit
    arclinkRequestLine.setStreamID(
        DataModel.WaveformStreamID(network[:8], station[:8], location[:8],
                                   channel[:8], ""))
    arclinkRequestLine.setConstraints(constr)
    # restricted/shared are only set when an explicit boolean was given
    if isinstance(restricted, bool):
        arclinkRequestLine.setRestricted(restricted)
    arclinkRequestLine.setNetClass(net_class)
    if isinstance(shared, bool):
        arclinkRequestLine.setShared(shared)
    #
    arclinkStatusLine = DataModel.ArclinkStatusLine()
    arclinkStatusLine.setVolumeID(volume)
    arclinkStatusLine.setStatus(status)
    arclinkStatusLine.setSize(size)
    arclinkStatusLine.setMessage(message)
    #
    arclinkRequestLine.setStatus(arclinkStatusLine)
    self.requestLines.append(arclinkRequestLine)
    self.averageTimeWindow += end_time - start_time
    self.totalLineCount += 1
    if status == "OK":
        self.okLineCount += 1
def run(self):
    """Export waveform-quality records for one stream, parameter and time
    span from the database into an XML archive file.

    Returns True on success, False when the database connection or the
    output file cannot be established.
    """
    if not self.query():
        sys.stderr.write("No database connection!\n")
        return False
    xarc = IO.XMLArchive()
    # create(file, write=True, compression? — flags per IO.XMLArchive API)
    if not xarc.create(self._outfile, True, True):
        sys.stderr.write("Could not create xml output file %s!\n" %
                         self._outfile)
        return False
    xarc.setFormattedOutput(self.commandline().hasOption("formatted"))
    # stream ID is expected as NET.STA.LOC.CHA
    (net, sta, loc, cha) = self._streamID.split(".")
    it = self.query().getWaveformQuality(
        DataModel.WaveformStreamID(net, sta, loc, cha, ""),
        self._parameter,
        Core.Time.FromString(self._start, "%Y-%m-%d %H:%M:%S"),
        Core.Time.FromString(self._end, "%Y-%m-%d %H:%M:%S"))
    # iterate the database cursor and dump each record
    while it.get():
        wfq = DataModel.WaveformQuality.Cast(it.get())
        xarc.writeObject(wfq)
        it.step()
    xarc.close()
    return True
class FDSNEvent(resource.Resource):
    """Twisted web resource implementing the FDSN event web service.

    GET requests are validated, an exporter matching the requested output
    format is created, and the actual work is delegated to
    _processRequest() in a worker thread.
    """
    isLeaf = True

    #--------------------------------------------------------------------------
    def __init__(self, hideAuthor=False, evaluationMode=None,
                 eventTypeWhitelist=None, eventTypeBlacklist=None):
        # hideAuthor          -- strip author information from the output
        # evaluationMode      -- if set, only events whose preferred origin
        #                        has this evaluation mode are returned
        # eventTypeWhitelist/ -- optional filters on the event type name
        # eventTypeBlacklist
        self._hideAuthor = hideAuthor
        self._evaluationMode = evaluationMode
        self._eventTypeWhitelist = eventTypeWhitelist
        self._eventTypeBlacklist = eventTypeBlacklist

    #--------------------------------------------------------------------------
    def render_GET(self, req):
        # Parse and validate GET parameters
        ro = _EventRequestOptions(req.args)
        try:
            ro.parse()
        except ValueError as e:  # modernized from py2 "except ValueError, e"
            Logging.warning(str(e))
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        # Catalog filter is not supported, any filter value will result in 204
        if ro.catalogs:
            msg = "no matching events found"
            return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # updateafter not implemented
        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            exp = None
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp:
                exp.setFormattedOutput(bool(ro.formatted))
            else:
                # fixed message grammar ("no available" -> "not available"),
                # consistent with the other event service implementation
                msg = "output format '%s' not available, export module " \
                      "'%s' could not be loaded." % (
                          ro.format, ro.Exporters[ro.format])
                return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE,
                                            msg, ro)

        # Create database query
        dbq = DataModel.DatabaseQuery(Application.Instance().database())
        if dbq.hasError():
            msg = "could not connect to database: %s" % dbq.errorMsg()
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Process request in separate thread
        d = deferToThread(self._processRequest, req, ro, dbq, exp)
        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
def addMagnitude(self, parent, magval, magu=0, creationTime=None,
                 agencyID=AGENCYID, author=AUTHOR):
    """Create a network magnitude of type 'M' and attach it to *parent*.

    magval -- magnitude value (converted to float)
    magu   -- magnitude uncertainty (converted to float)
    creationTime defaults to the current GMT time when not given.
    """
    if not creationTime:
        creationTime = sccore.Time.GMT()
    mag = scdatamodel.Magnitude('')
    # derive the public ID from the parent origin's ID
    self.eparams.GenerateId(mag, parent.publicID() + '#netMag.M')
    mag.setMagnitude(scdatamodel.RealQuantity(float(magval)))
    mag.magnitude().setUncertainty(float(magu))
    mag.setType('M')
    self.addCreationInfo(mag, creationTime, agencyID, author)
    parent.add(mag)
def updateOrigin(self, origin, PID=None, ot=None, lat=None, lon=None,
                 depth=None, mag=None, otu=None, latu=None, magu=None,
                 lonu=None, depthu=None, creationTime=None,
                 modificationTime=None):
    """Update selected fields of an existing origin in place.

    Only arguments that are truthy are applied; everything else is left
    untouched. Magnitude updates operate on origin.magnitude(0).

    Bug fix: modificationTime was previously ignored — the code set
    creationTime as the modification time on both the origin's and the
    magnitude's CreationInfo.
    """
    if PID:
        origin.setPublicID(PID)
    if ot:
        origin.setTime(scdatamodel.TimeQuantity(ot))
    if lat:
        origin.setLatitude(scdatamodel.RealQuantity(float(lat)))
    if lon:
        origin.setLongitude(scdatamodel.RealQuantity(float(lon)))
    if depth:
        origin.setDepth(scdatamodel.RealQuantity(float(depth)))
    if latu:
        origin.latitude().setUncertainty(float(latu))
    if lonu:
        origin.longitude().setUncertainty(float(lonu))
    if depthu:
        origin.depth().setUncertainty(float(depthu))
    if otu:
        origin.time().setUncertainty(float(otu))
    if creationTime:
        ci = origin.creationInfo()
        ci.setCreationTime(creationTime)
    if modificationTime:
        ci = origin.creationInfo()
        # was: ci.setModificationTime(creationTime) — wrong variable
        ci.setModificationTime(modificationTime)
    if mag or magu:
        m = origin.magnitude(0)
        if mag:
            m.setMagnitude(scdatamodel.RealQuantity(mag))
        if magu:
            m.magnitude().setUncertainty(magu)
        if modificationTime:
            ci = m.creationInfo()
            # was: ci.setModificationTime(creationTime) — wrong variable
            ci.setModificationTime(modificationTime)
def run(self):
    """Build a SeisComP inventory from nettab input files and write it to
    an XML file.

    Loads instrument DB, station map, access tables and attribute files,
    then processes each .tab file in sorted order. Returns True on
    success, False on any fatal error or exit request.
    """
    try:
        if self.dcid is None:
            print("Please specify datacenter/archive ID", file=sys.stderr)
            return False
        nettab = Nettab(self.dcid)
        instdb = Instruments(self.dcid)
        try:
            # auxiliary input files; __load_file presumably skips None
            # filenames — TODO confirm against its definition
            self.__load_file(instdb.load_db, self.inst_db_file)
            self.__load_file(nettab.load_statmap, self.stat_map_file)
            self.__load_file(nettab.load_access_net, self.access_net_file)
            self.__load_file(nettab.load_access_stat, self.access_stat_file)
            self.__load_file(instdb.load_sensor_attr, self.sensor_attr_file)
            self.__load_file(instdb.load_datalogger_attr,
                             self.datalogger_attr_file)
            self.__load_file(nettab.load_network_attr, self.network_attr_file)
            self.__load_file(nettab.load_station_attr, self.station_attr_file)
            inv = SC3Inventory(DataModel.Inventory())
            idx = 1
            # process each tab file and merge it into the inventory
            for tab in sorted(self.tab_files):
                print("Loading %s (%d/%d)" % (tab, idx, len(self.tab_files)),
                      file=sys.stderr)
                self.__load_file(nettab.load_tab, tab)
                print("Generating data structures", file=sys.stderr)
                nettab.update_inventory(instdb, inv)
                idx = idx + 1
            if self.isExitRequested():
                print("Exit requested, abort", file=sys.stderr)
                return False
            print("Generating output", file=sys.stderr)
            ar = IO.XMLArchive()
            ar.setFormattedOutput(
                self.commandline().hasOption("formatted"))
            ar.create(self.out_file)
            ar.writeObject(inv.obj)
            ar.close()
            print("Finished", file=sys.stderr)
        except (IOError, NettabError) as e:
            # expected input errors: report and fail gracefully
            logs.error("fatal error: " + str(e))
            return False
    except Exception:
        # unexpected errors: log traceback and fail
        logs.print_exc()
        return False
    return True
def __init__(self, xmlOut): self.ar = scio.XMLArchive() # seiscomp xml creator self.ar.setFormattedOutput(True) # output formatted xml file self.xmlOut = xmlOut self.eparams = scdatamodel.EventParameters() self.eparams.SetIdGeneration(True) self.eparams.SetIdPattern('@classname@#@time/%Y%m%d%H%M%S.%f@.@id@') self._stage = 2 self.Eid = '0'
def createOrigin(self, ot, lat, lon, depth=8, otu=0, latu=0, lonu=0,
                 depthu=0, mag=-99, magu=0, reported=0, creationTime=None,
                 agencyID=AGENCYID, author=AUTHOR):
    """Create and return a new Origin with location, depth, time, status,
    creation info and an attached network magnitude.

    reported -- truthy: evaluation status REPORTED, else PRELIMINARY
    Bug fix: the agencyID and author arguments were accepted but ignored
    (the module constants were always passed on); they are now forwarded
    to addCreationInfo(). Defaults are unchanged, so existing callers see
    identical behavior.
    """
    if not creationTime:
        creationTime = sccore.Time.GMT()
    # convert values to floats
    lat, lon, depth, otu, latu, lonu, depthu = [
        float(i) for i in (lat, lon, depth, otu, latu, lonu, depthu)
    ]
    origin = scdatamodel.Origin('')
    self.eparams.GenerateId(origin)
    origin.setLongitude(scdatamodel.RealQuantity(lon))
    origin.longitude().setUncertainty(lonu)
    origin.setLatitude(scdatamodel.RealQuantity(lat))
    origin.latitude().setUncertainty(latu)
    origin.setDepth(scdatamodel.RealQuantity(depth))
    origin.depth().setUncertainty(depthu)
    origin.setTime(scdatamodel.TimeQuantity(ot))
    origin.time().setUncertainty(otu)
    # NOTE(review): raw enum value 1 — presumably AUTOMATIC; confirm
    origin.setEvaluationMode(1)
    if int(reported):
        origin.setEvaluationStatus(scdatamodel.REPORTED)
    else:
        origin.setEvaluationStatus(scdatamodel.PRELIMINARY)
    self.addCreationInfo(origin, creationTime=creationTime,
                         agencyID=agencyID, author=author)
    self.addMagnitude(origin, magval=mag, magu=magu,
                      creationTime=creationTime)
    return origin
def send_notifiers(self, group):
    """Flush all pending DataModel notifiers to messaging group *group*
    in batches of at most 'maxmsg' notifiers per message.

    Returns 0 when there was nothing to send, otherwise the number of
    notifiers in the last (partial) batch.
    NOTE(review): returning 'mcount' looks suspicious — 'sent' would be
    the running total; confirm intended semantics with callers.
    """
    Nsize = DataModel.Notifier.Size()
    if Nsize > 0:
        Logging.info("trying to apply %d change%s" %
                     (Nsize, "s" if Nsize != 1 else ""))
    else:
        Logging.info("no changes to apply")
        return 0
    Nmsg = DataModel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()
    maxmsg = 100  # batch size per outgoing message
    sent = 0
    mcount = 0
    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier_Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    sent += mcount
                    Logging.debug("sending message (%5.1f %%)" %
                                  (sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync()
                it.next()
        except:
            # NOTE(review): bare except — presumably ends the SWIG
            # iterator loop, but it also swallows send/sync errors.
            pass
    finally:
        # flush the remaining partial batch
        if msg.size():
            Logging.debug("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync()
    Logging.info("done")
    return mcount
def render_GET(self, req):
    """Handle an HTTP GET for the event service: validate parameters,
    set up the exporter and database query, then hand off the actual
    processing to a worker thread.
    """
    # Parse and validate GET parameters
    ro = _EventRequestOptions()
    try:
        ro.parseGET(req.args)
        ro.parse()
    except ValueError as e:
        Logging.warning(str(e))
        return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

    # Catalog filter is not supported
    if ro.catalogs:
        msg = "catalog filter not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    if ro.comments and self._hideComments:
        msg = "including of comments not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    # updateafter not implemented
    if ro.updatedAfter:
        msg = "filtering based on update time not supported"
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    # reject formats not enabled in the configured format list
    if self._formatList is not None and ro.format not in self._formatList:
        msg = "output format '%s' not available" % ro.format
        return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    # Exporter, 'None' is used for text output
    if ro.format in ro.VText:
        exp = None
    else:
        exp = Exporter.Create(ro.Exporters[ro.format])
        if exp:
            exp.setFormattedOutput(bool(ro.formatted))
        else:
            msg = "output format '%s' not available, export module '%s' " \
                  "could " \
                  "not be loaded." % (ro.format, ro.Exporters[ro.format])
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

    # Create database query
    db = DatabaseInterface.Open(Application.Instance().databaseURI())
    if db is None:
        msg = "could not connect to database"
        return self.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

    dbq = DataModel.DatabaseQuery(db)

    # Process request in separate thread
    d = deferToThread(self._processRequest, req, ro, dbq, exp)
    req.notifyFinish().addErrback(utils.onCancel, d)
    d.addBoth(utils.onFinish, req)

    # The request is handled by the deferred object
    return server.NOT_DONE_YET
def _loadXml(self, folder):
    """Load every inventory XML file in *folder* and feed the contained
    stations to the station resolver.

    NOTE(review): Python-2 print syntax — this chunk predates a py3 port.
    """
    print >> sys.stderr, " Loading inventory from XML file ... ",
    for f in glob.glob(os.path.join(folder, "*.xml")):
        ar = IO.XMLArchive()
        ar.open(f)
        inventory = DataModel.Inventory_Cast(ar.readObject())
        ar.close()
        # only collect stations if the file actually contained an Inventory
        if inventory:
            self.stationResolver.collectStations(inventory)
    print >> sys.stderr, "Done."
def loadEventParametersObject(self, publicID):
    """Try to load *publicID* as each supported event-parameter child
    type in turn; return an EventParameters container holding the first
    match, or None when nothing could be loaded.
    """
    candidateTypes = (
        DataModel.Pick,
        DataModel.Amplitude,
        DataModel.Origin,
        DataModel.Event,
        DataModel.FocalMechanism,
        DataModel.Magnitude,
        DataModel.StationMagnitude,
    )
    for candidate in candidateTypes:
        loaded = candidate.Cast(
            self.query().loadObject(candidate.TypeInfo(), publicID))
        if not loaded:
            continue
        ep = DataModel.EventParameters()
        ep.add(loaded)
        return ep
def _processStation(newNet, sta, ro, skipRestricted):
    """Copy the matching sensor locations and streams of *sta* into
    *newNet*.

    Returns (channelCount, locationCount, dataloggerIDs, sensorIDs);
    (0, 0, [], []) when no stream matched and the station was not added.
    """
    totalStreams = 0
    loggerIDs, sensorIDs = set(), set()
    staCopy = DataModel.Station(sta)
    for loc in ro.locationIter(sta, True):
        locCopy = DataModel.SensorLocation(loc)
        for stream in ro.streamIter(loc, True):
            # restricted streams are dropped when requested
            if skipRestricted and utils.isRestricted(stream):
                continue
            locCopy.add(DataModel.Stream(stream))
            loggerIDs.add(stream.datalogger())
            sensorIDs.add(stream.sensor())
        if locCopy.streamCount() > 0:
            staCopy.add(locCopy)
            totalStreams += locCopy.streamCount()
    if staCopy.sensorLocationCount() > 0:
        newNet.add(staCopy)
        return totalStreams, staCopy.sensorLocationCount(), loggerIDs, \
            sensorIDs
    return 0, 0, [], []
def volume_status(self, volume, status, size, message):
    """Append an ArclinkStatusLine for one volume to self.statusLines.

    None values are normalized: volume -> "NODATA", size -> 0,
    message -> "".
    """
    statusLine = DataModel.ArclinkStatusLine()
    statusLine.setVolumeID("NODATA" if volume is None else volume)
    statusLine.setStatus(status)
    statusLine.setSize(0 if size is None else size)
    statusLine.setMessage("" if message is None else message)
    self.statusLines.append(statusLine)
def addStationMagnitude(self, parent, magval, amp, weight=1,
                        creationTime=None, agencyID=AGENCYID,
                        author=AUTHOR):
    """Create a station magnitude derived from amplitude *amp*, attach it
    to *parent* and register its contribution with the network magnitude
    at parent.magnitude(0).
    """
    if not creationTime:
        creationTime = sccore.Time.GMT()
    wfid = amp.waveformID()
    # public ID encodes origin, amplitude type and net.sta
    smagID = '%s#staMag.%s#%s.%s' % (parent.publicID(), amp.type(),
                                     wfid.networkCode(), wfid.stationCode())
    smag = scdatamodel.StationMagnitude(smagID)
    smag.setMagnitude(scdatamodel.RealQuantity(float(magval)))
    smag.setType(amp.type())
    smag.setAmplitudeID(amp.publicID())
    smag.setWaveformID(wfid)
    self.addCreationInfo(smag, creationTime, agencyID, author)
    parent.add(smag)
    parent.magnitude(0).add(
        scdatamodel.StationMagnitudeContribution(smag.publicID(), 0,
                                                 weight))
def _loadDatabase(self, dbUrl):
    """Connect to a SC3 database given as driver://user:pass@host/db and
    load the station inventory into the station resolver.

    Raises Exception on URL parse errors, plugin/driver failures or
    connection problems.
    """
    m = re.match(
        r"(?P<dbDriverName>^.*):\/\/(?P<dbAddress>.+?:.+?@.+?\/.+$)",
        dbUrl)
    if not m:
        raise Exception("error in parsing SC3 DB url")
    db = m.groupdict()
    try:
        registry = Client.PluginRegistry.Instance()
        registry.addPluginName("dbmysql")
        registry.loadPlugins()
    except Exception as e:
        # Bug fix: previously raised a plain (unformatted) string, which
        # is a TypeError in modern Python and lost the original error.
        raise Exception("Cannot load database driver: %s" % str(e))
    dbDriver = IO.DatabaseInterface.Create(db["dbDriverName"])
    if dbDriver is None:
        raise Exception("Cannot find database driver " + db["dbDriverName"])
    if not dbDriver.connect(db["dbAddress"]):
        raise Exception("Cannot connect to database at " + db["dbAddress"])
    dbQuery = DataModel.DatabaseQuery(dbDriver)
    if dbQuery is None:
        raise Exception("Cannot get DB query object")
    print(" Loading inventory from database ... ", end=' ',
          file=sys.stderr)
    inventory = DataModel.Inventory()
    dbQuery.loadNetworks(inventory)
    # stations must be loaded per network
    for ni in range(inventory.networkCount()):
        dbQuery.loadStations(inventory.network(ni))
    print("Done.", file=sys.stderr)
    if inventory:
        self.stationResolver.collectStations(inventory, True)
def process(self, line):
    """Parse one input line into a pick and send it as a notifier
    message; in --test mode only report what would be sent.
    """
    ep = DataModel.EventParameters()
    DataModel.Notifier.Enable()
    pick = self.parse(line)
    ep.add(pick)
    notifierMsg = DataModel.Notifier.GetMessage()
    pickID = pick.publicID()
    if self.commandline().hasOption("test"):
        print("I would now send pick", pickID)
    elif self.connection().send(notifierMsg):
        print("Succeeded to send pick", pickID)
    else:
        print("Failed to send pick", pickID)
    DataModel.Notifier.Disable()
def addCreationInfo(self, parent=None, creationTime=None,
                    agencyID=AGENCYID, author=AUTHOR,
                    modificationTime=None):
    """Build a CreationInfo and return it; if *parent* is given, the info
    is also attached to it.

    creationTime/modificationTime default to the current GMT time.
    """
    creationTime = creationTime or sccore.Time.GMT()
    modificationTime = modificationTime or sccore.Time.GMT()
    info = scdatamodel.CreationInfo()
    info.setAgencyID(agencyID)
    info.setAuthor(author)
    info.setCreationTime(creationTime)
    info.setModificationTime(modificationTime)
    if parent:
        parent.setCreationInfo(info)
    return info
def addAmplitude(self, ampval, amptype, snr, pickid, timeref, starttime,
                 endtime, creationTime=None, agencyID=AGENCYID,
                 author=AUTHOR):
    """Create an Amplitude tied to pick *pickid*, add it to the event
    parameters and return it.

    The waveform ID is taken from the referenced pick; starttime/endtime
    are offsets relative to *timeref*.
    """
    if not creationTime:
        creationTime = sccore.Time.GMT()
    amp = scdatamodel.Amplitude(pickid + '.' + amptype)
    amp.setType(amptype)
    amp.setAmplitude(scdatamodel.RealQuantity(float(ampval)))
    amp.setTimeWindow(
        scdatamodel.TimeWindow(timeref, float(starttime), float(endtime)))
    amp.setSnr(float(snr))
    amp.setPickID(pickid)
    amp.setWaveformID(self.eparams.findPick(pickid).waveformID())
    self.addCreationInfo(amp, creationTime, agencyID, author)
    self.eparams.add(amp)
    return amp
def run(self):
    """Message-pump thread: poll the connection for messages and apply
    every contained inventory notifier under the request handler's data
    lock, then trigger an inventory update. Runs forever.
    """
    while True:
        # non-blocking read; back off for a second when idle
        msg = self.__conn.readMessage(False)
        if msg is None:
            time.sleep(1)
            continue
        for obj in msg:
            notifier = DataModel.Notifier_Cast(obj)
            if notifier:
                # serialize against concurrent request handling
                RequestHandler.dataLock.acquire()
                try:
                    notifier.apply()
                    self.__rh.updateInventory()
                finally:
                    RequestHandler.dataLock.release()
def addOriginQuality(self, origin, ntrig, nsta, azspan, maxdist, mindist,
                     staerr):
    """Attach an OriginQuality to *origin* and return it.

    ntrig/nsta       -- phase and station counts (used == associated)
    azspan           -- covered azimuth span in degrees (gap = 360 - span)
    maxdist/mindist  -- distances in km, converted to degrees
    staerr           -- standard error
    """
    usedPhases = int(ntrig)
    usedStations = int(nsta)
    quality = scdatamodel.OriginQuality()
    quality.setUsedPhaseCount(usedPhases)
    quality.setAssociatedPhaseCount(usedPhases)
    quality.setUsedStationCount(usedStations)
    quality.setAssociatedStationCount(usedStations)
    quality.setAzimuthalGap(360 - float(azspan))
    quality.setMaximumDistance(km2deg(float(maxdist)))
    quality.setMinimumDistance(km2deg(float(mindist)))
    quality.setStandardError(float(staerr))
    origin.setQuality(quality)
    return quality
def sc3Obj(self, sc3i=None):
    """Assemble and return a SC3 Inventory object.

    sc3i -- optional existing Inventory to fill; a new one is created
            when not given.
    Adds networks (self.n), instrument objects (self.i), then resolves
    stations before adding station groups (self.g) — groups presumably
    need the resolved stations; confirm against StationResolver.
    """
    if not sc3i:
        sc3i = DataModel.Inventory()
    for network in list(self.n.values()):
        sc3n = network.sc3Obj(self.i)
        sc3i.add(sc3n)
    for sc3o in self.i.sc3Objs():
        sc3i.add(sc3o)
    self.stationResolver.collectStations(sc3i)
    for stationGroup in list(self.g.values()):
        sc3g = stationGroup.sc3Obj(self.stationResolver)
        sc3i.add(sc3g)
    return sc3i
def sendNotifiers(self, group):
    """Flush all pending DataModel notifiers to messaging group *group*
    in batches of at most 'maxmsg' notifiers, syncing with the
    "_sccfgupd_" peer after each send. Returns None.
    """
    Nsize = DataModel.Notifier.Size()
    if Nsize > 0:
        logd("trying to apply %d changes..." % Nsize)
    else:
        logd("no changes to apply")
        return
    Nmsg = DataModel.Notifier.GetMessage(True)
    it = Nmsg.iter()
    msg = DataModel.NotifierMessage()
    maxmsg = 100  # batch size per outgoing message
    sent = 0
    mcount = 0
    try:
        try:
            while it.get():
                msg.attach(DataModel.Notifier.Cast(it.get()))
                mcount += 1
                if msg and mcount == maxmsg:
                    sent += mcount
                    logd("sending message (%5.1f %%)" %
                         (sent / float(Nsize) * 100.0))
                    self.send(group, msg)
                    msg.clear()
                    mcount = 0
                    self.sync("_sccfgupd_")
                it.next()
        except:
            # NOTE(review): bare except — presumably terminates the SWIG
            # iterator loop, but it also swallows send/sync errors.
            pass
    finally:
        # flush the remaining partial batch
        if msg.size():
            logd("sending message (%5.1f %%)" % 100.0)
            self.send(group, msg)
            msg.clear()
            self.sync("_sccfgupd_")
def getDACache(self):
    """Return the cached data-availability extents, refreshing them from
    the database when the cache expired.

    Returns None when data availability is disabled; returns the stale
    cache (possibly None) when the database cannot be reached.
    """
    if not self._daEnabled:
        return None
    now = Core.Time.GMT()
    # check if cache is still valid
    if self._daCache is None or now > self._daCache.validUntil():
        # reconnect if the current database connection is gone
        if self.query() is None or \
                not self.query().driver().isConnected():
            dbInt = IO.DatabaseInterface.Open(self.databaseURI())
            if dbInt is None:
                Logging.error('failed to connect to database')
                return self._daCache
            else:
                self.setDatabase(dbInt)
        da = DataModel.DataAvailability()
        self.query().loadDataExtents(da)
        validUntil = now + Core.TimeSpan(self._daCacheDuration, 0)
        self._daCache = DataAvailabilityCache(self, da, validUntil)
    return self._daCache
def _processStation(newNet, net, sta, ro, dac, skipRestricted):
    """Copy the matching sensor locations and streams of *sta* (incl.
    comments) into *newNet*, optionally collecting data-availability
    extents.

    dac -- data-availability cache or None
    Returns (chaCount, locCount, dataloggerIDs, sensorIDs, extents);
    (0, 0, [], [], []) when no stream matched.
    """
    chaCount = 0
    dataloggers, sensors, extents = set(), set(), set()
    newSta = DataModel.Station(sta)
    includeAvailability = dac is not None and ro.availability
    # Copy comments
    for i in xrange(sta.commentCount()):
        newSta.add(DataModel.Comment(sta.comment(i)))
    for loc in ro.locationIter(net, sta, True):
        newLoc = DataModel.SensorLocation(loc)
        # Copy comments
        for i in xrange(loc.commentCount()):
            newLoc.add(DataModel.Comment(loc.comment(i)))
        for stream in ro.streamIter(net, sta, loc, True, dac):
            # restricted streams are dropped when requested
            if skipRestricted and utils.isRestricted(stream):
                continue
            newCha = DataModel.Stream(stream)
            # Copy comments
            for i in xrange(stream.commentCount()):
                newCha.add(DataModel.Comment(stream.comment(i)))
            newLoc.add(newCha)
            dataloggers.add(stream.datalogger())
            sensors.add(stream.sensor())
            if includeAvailability:
                ext = dac.extent(net.code(), sta.code(), loc.code(),
                                 stream.code())
                if ext is not None:
                    extents.add(ext)
        if newLoc.streamCount() > 0:
            newSta.add(newLoc)
            chaCount += newLoc.streamCount()
    if newSta.sensorLocationCount() > 0:
        newNet.add(newSta)
        return chaCount, newSta.sensorLocationCount(), dataloggers, \
            sensors, extents
    return 0, 0, [], [], []
def _copyReferences(self, newInv, req, objCount, inv, ro, dataloggers,
                    sensors, maxObj):
    """Copy referenced dataloggers, sensors and (optionally) responses
    from *inv* into *newInv*.

    dataloggers/sensors -- sets of public IDs referenced by the streams
    Returns the number of copied decimations, or None when the client
    disconnected or the object limit (maxObj) was exceeded.
    """
    responses = set()
    decCount = 0

    # datalogger
    for i in xrange(inv.dataloggerCount()):
        if req._disconnected:
            return None
        logger = inv.datalogger(i)
        if logger.publicID() not in dataloggers:
            continue
        newLogger = DataModel.Datalogger(logger)
        newInv.add(newLogger)
        # decimations are only needed for responses
        if ro.includeRes:
            for j in xrange(logger.decimationCount()):
                decimation = logger.decimation(j)
                newLogger.add(DataModel.Decimation(decimation))
                # collect response ids
                filterStr = ""
                try:
                    filterStr = decimation.analogueFilterChain().content(
                    ) + " "
                except ValueError:
                    pass
                try:
                    filterStr += decimation.digitalFilterChain().content()
                except ValueError:
                    pass
                for resp in filterStr.split():
                    responses.add(resp)
            decCount += newLogger.decimationCount()

    objCount += newInv.dataloggerCount() + decCount
    resCount = len(responses)
    if not self.checkObjects(req, objCount + resCount, maxObj):
        return None

    # sensor
    for i in xrange(inv.sensorCount()):
        if req._disconnected:
            return None
        sensor = inv.sensor(i)
        if sensor.publicID() not in sensors:
            continue
        newSensor = DataModel.Sensor(sensor)
        newInv.add(newSensor)
        resp = newSensor.response()
        if resp:
            if ro.includeRes:
                responses.add(resp)
            else:
                # no responses: remove response reference to avoid missing
                # response warning of exporter
                newSensor.setResponse("")

    objCount += newInv.sensorCount()
    resCount = len(responses)
    if not self.checkObjects(req, objCount + resCount, maxObj):
        return None

    # responses: copy every response type whose ID was referenced
    if ro.includeRes:
        if req._disconnected:
            return None
        for i in xrange(inv.responsePAZCount()):
            resp = inv.responsePAZ(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponsePAZ(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseFIRCount()):
            resp = inv.responseFIR(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseFIR(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responsePolynomialCount()):
            resp = inv.responsePolynomial(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponsePolynomial(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseFAPCount()):
            resp = inv.responseFAP(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseFAP(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseIIRCount()):
            resp = inv.responseIIR(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseIIR(resp))

    return decCount
def _processRequestExp(self, req, ro, exp, dac):
    """Worker for station service requests using an exporter.

    Builds a filtered copy of the inventory (self._inv) according to the
    request options, copies referenced dataloggers/sensors/responses and
    optionally data-availability extents, then streams the result via
    *exp*. Returns False on abort, True when a response was written.
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    # requested detail level decides how deep the iteration must go
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not self.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    objCount += numLoc + numCha + extCount
                    if not self.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            objCount += staCount + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = self.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            # disconnect or object limit exceeded
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                newInv.responseFIRCount() + \
                newInv.responsePolynomialCount() + \
                newInv.responseFAPCount() + \
                newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        # export inventory and availability as one object list
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/"
        "bytes: %i/%i) " % (ro.service, newInv.networkCount(), staCount,
                            locCount, chaCount, newInv.dataloggerCount(),
                            decCount, newInv.sensorCount(), resCount,
                            extCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def run(self):
    """Dump one event as formatted XML to stdout.

    Things to do:

    * load event
    * load preferred origin without arrivals
    * load at least the preferred magnitude if available, all magnitudes
      if requested
    * load focal mechanism incl. moment tensor depending on availability,
      incl. Mw from derived origin

    NOTE(review): Python-2 raise syntax — this chunk predates a py3 port.
    """
    evid = self.commandline().optionString("event")

    # Load event and preferred origin. This is the minimum required info
    # and if it can't be loaded, give up.
    event = self._loadEvent(evid)
    if event is None:
        raise ValueError, "unknown event '" + evid + "'"
    # preferredOrigin = self._loadOrigin(event.preferredOriginID())
    preferredOrigin = self.query().getObject(DataModel.Origin.TypeInfo(),
                                             event.preferredOriginID())
    preferredOrigin = DataModel.Origin.Cast(preferredOrigin)
    if preferredOrigin is None:
        raise ValueError, "unknown origin '" + event.preferredOriginID(
        ) + "'"

    # take care of origin references and leave just one for the preferred
    # origin
    while (event.originReferenceCount() > 0):
        event.removeOriginReference(0)
    if preferredOrigin:
        event.add(DataModel.OriginReference(preferredOrigin.publicID()))
    if self.commandline().hasOption("comments"):
        self.query().loadComments(preferredOrigin)

    # load all magnitudes for preferredOrigin
    if self.commandline().hasOption("all-magnitudes"):
        self.query().loadMagnitudes(preferredOrigin)
        magnitudes = [
            preferredOrigin.magnitude(i)
            for i in range(preferredOrigin.magnitudeCount())
        ]
    else:
        magnitudes = []

    if event.preferredMagnitudeID():
        # try to load from memory
        for mag in magnitudes:
            if mag.publicID() == event.preferredMagnitudeID():
                preferredMagnitude = mag
                break
        # preferredMagnitude = DataModel.Magnitude.Find(event.preferredMagnitudeID())
        else:
            # for-else: no break above -> try to load it from database
            preferredMagnitude = self._loadMagnitude(
                event.preferredMagnitudeID())
    else:
        preferredMagnitude = None

    # try to load focal mechanism, moment tensor, moment magnitude and
    # related origins
    momentTensor = momentMagnitude = derivedOrigin = triggeringOrigin = \
        None  # default
    focalMechanism = self._loadFocalMechanism(
        event.preferredFocalMechanismID())
    if focalMechanism:
        if focalMechanism.triggeringOriginID():
            if event.preferredOriginID(
            ) == focalMechanism.triggeringOriginID():
                triggeringOrigin = preferredOrigin
            else:
                triggeringOrigin = self.query().getObject(
                    DataModel.Origin.TypeInfo(),
                    focalMechanism.triggeringOriginID())
                triggeringOrigin = DataModel.Origin.Cast(triggeringOrigin)
        if focalMechanism.momentTensorCount() > 0:
            momentTensor = focalMechanism.momentTensor(
                0)  # FIXME What if there is more than one MT?
            if momentTensor.derivedOriginID():
                derivedOrigin = self.query().getObject(
                    DataModel.Origin.TypeInfo(),
                    momentTensor.derivedOriginID())
                derivedOrigin = DataModel.Origin.Cast(derivedOrigin)
            if momentTensor.momentMagnitudeID():
                if momentTensor.momentMagnitudeID(
                ) == event.preferredMagnitudeID():
                    momentMagnitude = preferredMagnitude
                else:
                    momentMagnitude = self._loadMagnitude(
                        momentTensor.momentMagnitudeID())

    # take care of FocalMechanism and related references
    if derivedOrigin:
        event.add(DataModel.OriginReference(derivedOrigin.publicID()))
    if triggeringOrigin:
        if event.preferredOriginID() != triggeringOrigin.publicID():
            event.add(
                DataModel.OriginReference(triggeringOrigin.publicID()))
    while (event.focalMechanismReferenceCount() > 0):
        event.removeFocalMechanismReference(0)
    if focalMechanism:
        event.add(
            DataModel.FocalMechanismReference(
                focalMechanism.publicID()))
        self._removeCommentsIfRequested(focalMechanism)

    # strip creation info
    if not self.commandline().hasOption("include-full-creation-info"):
        self._stripCreationInfo(event)
        if focalMechanism:
            self._stripCreationInfo(focalMechanism)
            for i in xrange(focalMechanism.momentTensorCount()):
                self._stripCreationInfo(focalMechanism.momentTensor(i))
        for org in [preferredOrigin, triggeringOrigin, derivedOrigin]:
            if org is not None:
                self._stripCreationInfo(org)
                for i in xrange(org.magnitudeCount()):
                    self._stripCreationInfo(org.magnitude(i))

    # populate EventParameters instance
    ep = DataModel.EventParameters()
    ep.add(event)
    if preferredMagnitude and preferredMagnitude is not momentMagnitude:
        preferredOrigin.add(preferredMagnitude)
    ep.add(preferredOrigin)
    if focalMechanism:
        if triggeringOrigin:
            if triggeringOrigin is not preferredOrigin:
                ep.add(triggeringOrigin)
        if derivedOrigin:
            if momentMagnitude:
                derivedOrigin.add(momentMagnitude)
            ep.add(derivedOrigin)
        ep.add(focalMechanism)

    # finally dump event parameters as formatted XML archive to stdout
    ar = IO.XMLArchive()
    ar.setFormattedOutput(True)
    ar.create("-")
    ar.writeObject(ep)
    ar.close()
    del ep