def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load all event-related objects (origins, focal mechanisms,
    magnitudes, arrivals, picks, comments) for the events in *ep* from
    the database and stream the serialized result to the client.

    Returns True on success, False if the client disconnected, the
    object limit was exceeded or the export failed.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # this exporter includes picks by default unless explicitly disabled
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        originIDs = set()
        magIDs = set()
        magIDs.add(e.preferredMagnitudeID())

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
                originIDs.add(oRef.originID())
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
                originIDs.add(oRef.originID())
        dbIter.close()
        objCount += e.originReferenceCount()

        # focalMechanism references: either none, preferred only or all
        if ro.fm or ro.allFMs:
            dbIter = dbq.getObjects(
                e, DataModel.FocalMechanismReference.TypeInfo())
            for obj in dbIter:
                fmRef = DataModel.FocalMechanismReference.Cast(obj)
                if fmRef is None:
                    continue
                if ro.allFMs:
                    e.add(fmRef)
                elif fmRef.focalMechanismID() == \
                        e.preferredFocalMechanismID():
                    e.add(fmRef)
            dbIter.close()

        objCount += e.focalMechanismReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # focal mechanisms: process before origins to add derived origin to
        # originID list since it may be missing from origin reference list
        for iFMRef in xrange(e.focalMechanismReferenceCount()):
            if req._disconnected:
                return False
            fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
            obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
            fm = DataModel.FocalMechanism.Cast(obj)
            if fm is None:
                continue

            ep.add(fm)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(fm)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, fm)

            # momentTensors
            objCount += dbq.loadMomentTensors(fm)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            for iMT in xrange(fm.momentTensorCount()):
                mt = fm.momentTensor(iMT)
                originIDs.add(mt.derivedOriginID())
                magIDs.add(mt.momentMagnitudeID())
                if self._hideAuthor:
                    self._removeAuthor(mt)
                # BUG FIX: comments were loaded inside a nested
                # 'for iMT in xrange(fm.momentTensorCount())' loop which
                # loaded the comments of the *same* moment tensor once per
                # tensor, duplicating comment objects and inflating objCount
                if ro.comments:
                    objCount += self._loadComments(dbq, mt)
                objCount += dbq.loadDataUseds(mt)
                objCount += dbq.loadMomentTensorPhaseSettings(mt)
                if ro.staMTs:
                    objCount += dbq.loadMomentTensorStationContributions(mt)
                    for iStaMT in xrange(
                            mt.momentTensorStationContributionCount()):
                        objCount += dbq.load(
                            mt.momentTensorStationContribution(iStaMT))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

        # find ID of origin containing preferred Magnitude
        if e.preferredMagnitudeID():
            obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                e.preferredMagnitudeID())
            m = DataModel.Magnitude.Cast(obj)
            if m is not None:
                oID = dbq.parentPublicID(m)
                if oID:
                    originIDs.add(oID)

        # origins
        for oID in originIDs:
            if req._disconnected:
                return False
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() in magIDs:
                    o.add(mag)
                # BUG FIX: author removal was placed after dbIter.close(),
                # so it touched only the last magnitude and raised
                # NameError when the iterator yielded nothing
                if self._hideAuthor:
                    self._removeAuthor(mag)
            dbIter.close()

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUG FIX: condition was 'if self._removeAuthor:' which is
                # always true (bound method object); the flag is _hideAuthor
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, exp, dac):
    """Build a filtered copy of the inventory (optionally with data
    availability extents) according to the request options *ro* and
    stream the serialized result to the client.

    Returns True on success (including the empty 204 case), False on
    disconnect, object-limit overrun or export failure.
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors, extents = set(), set(), set()

    skipRestricted = not self._allowRestricted or \
        (ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # Copy comments
        for i in xrange(net.commentCount()):
            newNet.add(DataModel.Comment(net.comment(i)))

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s, e = \
                    self._processStation(newNet, net, sta, ro, dac,
                                         skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    extCount += len(e)
                    # BUG FIX: previously added the *cumulative* extCount,
                    # re-counting all extents of earlier stations on every
                    # iteration and tripping the object limit too early
                    objCount += numLoc + numCha + len(e)
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
                    extents |= e
            elif self._matchStation(net, sta, ro, dac):
                if ro.includeSta:
                    newSta = DataModel.Station(sta)
                    # Copy comments
                    for i in xrange(sta.commentCount()):
                        newSta.add(DataModel.Comment(sta.comment(i)))
                    newNet.add(newSta)
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # BUG FIX: previously added the *cumulative* staCount,
            # re-counting stations of earlier networks on every iteration
            objCount += newNet.stationCount() + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount() + \
                       newInv.responseFAPCount() + \
                       newInv.responseIIRCount()
            objCount += resCount + decCount + newInv.dataloggerCount() + \
                        newInv.sensorCount()

    # Copy data extents
    objOut = newInv
    if len(extents) > 0:
        objCount += 1
        da = DataModel.DataAvailability()
        for e in extents:
            da.add(DataModel.DataExtent(e))
        objOut = ExportObjectList()
        objOut.append(newInv)
        objOut.append(da)

    sink = utils.Sink(req)
    if not exp.write(sink, objOut):
        return False

    Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                  "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/" \
                  "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
                  staCount, locCount, chaCount, newInv.dataloggerCount(),
                  decCount, newInv.sensorCount(), resCount, extCount,
                  objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _copyReferences(newInv, req, objCount, inv, ro, dataloggers, sensors,
                    maxObj):
    # NOTE(review): no 'self' parameter -- presumably declared as a
    # @staticmethod outside this view; confirm at the definition site.
    """Copy the datalogger, sensor and response objects referenced by the
    collected channel streams from *inv* into *newInv*.

    *dataloggers* and *sensors* are sets of public IDs to copy;
    decimations and responses are only copied when ro.includeRes is set.
    *objCount* is a plain int parameter, so increments made here are
    local -- the caller re-adds the copied object counts itself.

    Returns the number of copied decimations, or None if the client
    disconnected or the object limit *maxObj* was exceeded.
    """
    responses = set()
    decCount = 0

    # datalogger
    for i in xrange(inv.dataloggerCount()):
        if req._disconnected:
            return None
        logger = inv.datalogger(i)
        if logger.publicID() not in dataloggers:
            continue
        newLogger = DataModel.Datalogger(logger)
        newInv.add(newLogger)

        # decimations are only needed for responses
        if ro.includeRes:
            for j in xrange(logger.decimationCount()):
                decimation = logger.decimation(j)
                newLogger.add(DataModel.Decimation(decimation))

                # collect response ids
                # the filter chains are space-separated lists of response
                # public IDs; the trailing " " keeps analogue and digital
                # chains separated before split()
                filterStr = ""
                try:
                    filterStr = decimation.analogueFilterChain().content() + " "
                except ValueError:
                    pass
                try:
                    filterStr += decimation.digitalFilterChain().content()
                except ValueError:
                    pass
                for resp in filterStr.split():
                    responses.add(resp)
            decCount += newLogger.decimationCount()

    objCount += newInv.dataloggerCount() + decCount
    resCount = len(responses)
    if not HTTP.checkObjects(req, objCount + resCount, maxObj):
        return None

    # sensor
    for i in xrange(inv.sensorCount()):
        if req._disconnected:
            return None
        sensor = inv.sensor(i)
        if sensor.publicID() not in sensors:
            continue
        newSensor = DataModel.Sensor(sensor)
        newInv.add(newSensor)
        # response() returns the public ID (string) of the referenced
        # response object, empty if unset
        resp = newSensor.response()
        if resp:
            if ro.includeRes:
                responses.add(resp)
            else:
                # no responses: remove response reference to avoid missing
                # response warning of exporter
                newSensor.setResponse("")

    objCount += newInv.sensorCount()
    resCount = len(responses)
    if not HTTP.checkObjects(req, objCount + resCount, maxObj):
        return None

    # responses: copy every response type whose public ID was collected
    if ro.includeRes:
        if req._disconnected:
            return None
        for i in xrange(inv.responsePAZCount()):
            resp = inv.responsePAZ(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponsePAZ(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseFIRCount()):
            resp = inv.responseFIR(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseFIR(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responsePolynomialCount()):
            resp = inv.responsePolynomial(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponsePolynomial(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseFAPCount()):
            resp = inv.responseFAP(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseFAP(resp))
        if req._disconnected:
            return None
        for i in xrange(inv.responseIIRCount()):
            resp = inv.responseIIR(i)
            if resp.publicID() in responses:
                newInv.add(DataModel.ResponseIIR(resp))

    return decCount
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load origins, magnitudes, arrivals, picks and comments for the
    events in *ep* from the database and stream the serialized result
    to the client.

    Returns True on success, False if the client disconnected, the
    object limit was exceeded or the export failed.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # this exporter includes picks by default unless explicitly disabled
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            # BUG FIX: was 'self._loadComment' (AttributeError at runtime);
            # the helper is named _loadComments everywhere else
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
        dbIter.close()
        # TODO: if focal mechanisms are added make sure derived
        # origin is loaded
        objCount += e.originReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                # BUG FIX: author removal was placed after dbIter.close(),
                # so it touched only the last magnitude and raised
                # NameError when the iterator yielded nothing
                if self._hideAuthor:
                    self._removeAuthor(mag)
            dbIter.close()

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUG FIX: condition was 'if self._removeAuthor:' which is
                # always true (bound method object); the flag is _hideAuthor
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequest(self, req, ro, dbq, exp):
    """Query matching events (by explicit IDs or via self._findEvents),
    load their related origins, magnitudes, arrivals and picks, and
    stream the serialized result to the client.

    Returns True on success, False on disconnect, empty result set,
    object-limit overrun or export failure.
    """
    if req._disconnected:
        return False

    DataModel.PublicObject.SetRegistrationEnabled(False)
    maxObj = Application.Instance()._queryObjects

    # query event(s)
    ep = DataModel.EventParameters()
    if ro.eventIDs:
        for eID in ro.eventIDs:
            event = dbq.getEventByPublicID(eID)
            event = DataModel.Event.Cast(event)
            if event:
                ep.add(event)
    else:
        self._findEvents(ep, ro, dbq)

    if ep.eventCount() == 0:
        msg = "No matching events found"
        utils.writeTS(req,
                      HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
        return False

    objCount = ep.eventCount()
    Logging.debug("events found: %i" % objCount)
    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # add related information
    for iEvent in xrange(ep.eventCount()):
        e = ep.event(iEvent)
        # CLEANUP: removed unused local 'eID = e.publicID()'

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += dbq.loadComments(e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
        dbIter.close()
        objCount += e.originReferenceCount()

        # TODO: load FocalMechanismReferences???
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origins
        for iORef in xrange(e.originReferenceCount()):
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1

            # comments
            if ro.comments:
                objCount += dbq.loadComments(o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
            dbIter.close()
            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += dbq.loadComments(o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                ep.add(pick)

    # content type depends on requested output format
    if ro.output == "csv":
        req.setHeader("Content-Type", "text/plain")
    else:
        req.setHeader("Content-Type", "application/xml")
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.notice("%s: returned %i events and %i origins (total " \
                   "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                   ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, dbq, exp, ep):
    """Load origins, magnitudes, arrivals, picks and comments for the
    events in *ep* from the database and stream the serialized result
    to the client.

    Returns True on success, False if the client disconnected, the
    object limit was exceeded or the export failed.
    """
    objCount = ep.eventCount()
    maxObj = Application.Instance()._queryObjects

    if not HTTP.checkObjects(req, objCount, maxObj):
        return False

    pickIDs = set()
    # this exporter includes picks by default unless explicitly disabled
    if ro.picks is None:
        ro.picks = True

    # add related information
    for iEvent in xrange(ep.eventCount()):
        if req._disconnected:
            return False
        e = ep.event(iEvent)
        if self._hideAuthor:
            self._removeAuthor(e)

        # eventDescriptions and comments
        objCount += dbq.loadEventDescriptions(e)
        if ro.comments:
            objCount += self._loadComments(dbq, e)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # origin references: either all or preferred only
        dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
        for obj in dbIter:
            oRef = DataModel.OriginReference.Cast(obj)
            if oRef is None:
                continue
            if ro.allOrigins:
                e.add(oRef)
            elif oRef.originID() == e.preferredOriginID():
                e.add(oRef)
        dbIter.close()
        # TODO: if focal mechanisms are added make sure derived
        # origin is loaded
        objCount += e.originReferenceCount()
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        # TODO: add focal mechanisms

        # origins
        for iORef in xrange(e.originReferenceCount()):
            if req._disconnected:
                return False
            oID = e.originReference(iORef).originID()
            obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
            o = DataModel.Origin.Cast(obj)
            if o is None:
                continue

            ep.add(o)
            objCount += 1
            if self._hideAuthor:
                self._removeAuthor(o)

            # comments
            if ro.comments:
                objCount += self._loadComments(dbq, o)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # magnitudes
            dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
            for obj in dbIter:
                mag = DataModel.Magnitude.Cast(obj)
                if mag is None:
                    continue
                if ro.allMags:
                    o.add(mag)
                elif mag.publicID() == e.preferredMagnitudeID():
                    o.add(mag)
                # BUG FIX: author removal was placed after dbIter.close(),
                # so it touched only the last magnitude and raised
                # NameError when the iterator yielded nothing
                if self._hideAuthor:
                    self._removeAuthor(mag)
            dbIter.close()

            objCount += o.magnitudeCount()
            if ro.comments:
                for iMag in xrange(o.magnitudeCount()):
                    objCount += self._loadComments(dbq, o.magnitude(iMag))
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO station magnitudes, amplitudes
            # - added pick id for each pick referenced by amplitude

            # arrivals
            if ro.arrivals:
                objCount += dbq.loadArrivals(o)
                # BUG FIX: condition was 'if self._removeAuthor:' which is
                # always true (bound method object); the flag is _hideAuthor
                if self._hideAuthor:
                    for iArrival in xrange(o.arrivalCount()):
                        self._removeAuthor(o.arrival(iArrival))

                # collect pick IDs if requested
                if ro.picks:
                    for iArrival in xrange(o.arrivalCount()):
                        pickIDs.add(o.arrival(iArrival).pickID())

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

    # picks
    if pickIDs:
        objCount += len(pickIDs)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False
        for pickID in pickIDs:
            obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
            pick = DataModel.Pick.Cast(obj)
            if pick is not None:
                if self._hideAuthor:
                    self._removeAuthor(pick)
                if ro.comments:
                    objCount += self._loadComments(dbq, pick)
                ep.add(pick)
        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

    # write response
    sink = utils.Sink(req)
    if not exp.write(sink, ep):
        return False
    Logging.debug("%s: returned %i events and %i origins (total " \
                  "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                  ep.originCount(), objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _processRequestExp(self, req, ro, exp):
    """Build a filtered copy of the inventory according to the request
    options *ro* and stream the serialized result to the client.

    Returns True on success (including the empty 204 case), False on
    disconnect, object-limit overrun or export failure.
    """
    if req._disconnected:
        return False

    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    DataModel.PublicObject.SetRegistrationEnabled(False)
    newInv = DataModel.Inventory()
    dataloggers, sensors = set(), set()

    skipRestricted = not self._allowRestricted or (
        ro.restricted is not None and not ro.restricted)
    levelNet = not ro.includeSta
    levelSta = ro.includeSta and not ro.includeCha

    # iterate over inventory networks
    for net in ro.networkIter(self._inv, levelNet):
        if req._disconnected:
            return False
        if skipRestricted and utils.isRestricted(net):
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in ro.stationIter(net, levelSta):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(sta):
                continue
            if not HTTP.checkObjects(req, objCount, self._maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(
                    newNet, sta, ro, skipRestricted)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, self._maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station
                    # is sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # BUG FIX: previously added the *cumulative* staCount,
            # re-counting stations of earlier networks on every iteration
            # and tripping the object limit too early
            objCount += newNet.stationCount() + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        msg = "no matching inventory found"
        data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
        if data:
            utils.writeTS(req, data)
        return True

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, self._inv,
                                        ro, dataloggers, sensors,
                                        self._maxObj)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
        objCount += resCount + decCount + newInv.dataloggerCount() + \
                    newInv.sensorCount()

    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False

    Logging.notice(
        "%s: returned %iNet, %iSta, %iLoc, %iCha, "
        "%iDL, %iDec, %iSen, %iRes (total objects/bytes: "
        "%i/%i) " % (
            ro.service,
            newInv.networkCount(),
            staCount,
            locCount,
            chaCount,
            newInv.dataloggerCount(),
            decCount,
            newInv.sensorCount(),
            resCount,
            objCount,
            sink.written,
        )
    )
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True
def _copyReferences(newInv, req, objCount, inv, ro, dataloggers, sensors):
    # NOTE(review): no 'self' parameter -- presumably declared as a
    # @staticmethod outside this view; confirm at the definition site.
    """Copy the datalogger, sensor and response objects referenced by
    the collected channel streams from *inv* into *newInv*.

    Returns the number of copied decimations, or None if the object
    limit was exceeded.  *objCount* is a plain int parameter, so
    increments made here stay local; the caller re-adds the counts.
    """
    respPAZ, respFIR, respPoly = set(), set(), set()
    maxObj = Application.Instance()._queryObjects
    decCount = 0

    # datalogger
    for i in xrange(inv.dataloggerCount()):
        logger = inv.datalogger(i)
        if logger.publicID() not in dataloggers:
            continue
        newLogger = DataModel.Datalogger(logger)
        newInv.add(newLogger)

        # decimations are only needed for responses
        if ro.includeRes:
            # CLEANUP: loop variable renamed from 'i', which shadowed the
            # datalogger index of the enclosing loop
            for j in xrange(logger.decimationCount()):
                decimation = logger.decimation(j)
                newLogger.add(DataModel.Decimation(decimation))

                # collect response ids
                filterStr = ""
                try:
                    # BUG FIX: a trailing separator is required; without it
                    # the last analogue and the first digital response ID
                    # were fused into one unusable token by split()
                    filterStr = decimation.analogueFilterChain().content() + " "
                # NOTE(review): 'ValueException' must be provided by this
                # module's imports (the sibling implementation catches
                # ValueError) -- confirm against the file header
                except ValueException:
                    pass
                try:
                    filterStr += decimation.digitalFilterChain().content()
                except ValueException:
                    pass
                for resp in filterStr.split():
                    if resp.startswith("ResponsePAZ"):
                        respPAZ.add(resp)
                    elif resp.startswith("ResponseFIR"):
                        respFIR.add(resp)
                    elif resp.startswith("ResponsePolynomial"):
                        respPoly.add(resp)
            decCount += newLogger.decimationCount()

    objCount += newInv.dataloggerCount() + decCount
    resCount = len(respPAZ) + len(respFIR) + len(respPoly)
    if not HTTP.checkObjects(req, objCount + resCount, maxObj):
        return None

    # sensor
    for i in xrange(inv.sensorCount()):
        sensor = inv.sensor(i)
        if sensor.publicID() not in sensors:
            continue
        newSensor = DataModel.Sensor(sensor)
        newInv.add(newSensor)
        # response() returns the public ID (string) of the referenced
        # response object, empty if unset
        resp = newSensor.response()
        if resp:
            if ro.includeRes:
                if resp.startswith("ResponsePAZ"):
                    respPAZ.add(resp)
                elif resp.startswith("ResponseFIR"):
                    respFIR.add(resp)
                elif resp.startswith("ResponsePolynomial"):
                    respPoly.add(resp)
            else:
                # no responses: remove response reference to avoid missing
                # response warning of exporter
                newSensor.setResponse("")

    objCount += newInv.sensorCount()
    resCount = len(respPAZ) + len(respFIR) + len(respPoly)
    if not HTTP.checkObjects(req, objCount + resCount, maxObj):
        return None

    # responses
    if ro.includeRes:
        for i in xrange(inv.responsePAZCount()):
            resp = inv.responsePAZ(i)
            if resp.publicID() in respPAZ:
                newInv.add(DataModel.ResponsePAZ(resp))
        for i in xrange(inv.responseFIRCount()):
            resp = inv.responseFIR(i)
            if resp.publicID() in respFIR:
                newInv.add(DataModel.ResponseFIR(resp))
        for i in xrange(inv.responsePolynomialCount()):
            resp = inv.responsePolynomial(i)
            if resp.publicID() in respPoly:
                newInv.add(DataModel.ResponsePolynomial(resp))

    return decCount
def _processRequest(self, req, ro, exp):
    """Build a filtered copy of the global inventory according to the
    request options *ro* and stream the serialized XML result to the
    client.

    Returns True on success, False on disconnect, empty result set,
    object-limit overrun or export failure.
    """
    if req._disconnected:
        return False

    # CLEANUP: removed a duplicated
    # DataModel.PublicObject.SetRegistrationEnabled(False) call
    DataModel.PublicObject.SetRegistrationEnabled(False)
    maxObj = Application.Instance()._queryObjects
    staCount, locCount, chaCount, objCount = 0, 0, 0, 0

    inv = Application.Instance()._inv
    newInv = DataModel.Inventory()
    # CLEANUP: removed unused local
    # 'filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)'
    dataloggers, sensors = set(), set()

    # iterate over inventory networks
    for net in utils.networkIter(inv, ro):
        if not ro.restricted and net.restricted():
            continue
        newNet = DataModel.Network(net)

        # iterate over inventory stations of current network
        for sta in utils.stationIter(net, ro, matchGeo=True):
            if not ro.restricted and sta.restricted():
                continue
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            if ro.includeCha:
                numCha, numLoc, d, s = self._processStation(newNet, sta, ro)
                if numCha > 0:
                    locCount += numLoc
                    chaCount += numCha
                    objCount += numLoc + numCha
                    if not HTTP.checkObjects(req, objCount, maxObj):
                        return False
                    dataloggers |= d
                    sensors |= s
            elif self._matchStation(sta, ro):
                if ro.includeSta:
                    newNet.add(DataModel.Station(sta))
                else:
                    # no station output requested: one matching station is
                    # sufficient to include the network
                    newInv.add(newNet)
                    objCount += 1
                    break

        if newNet.stationCount() > 0:
            newInv.add(newNet)
            staCount += newNet.stationCount()
            # BUG FIX: previously added the *cumulative* staCount,
            # re-counting stations of earlier networks on every iteration
            objCount += newNet.stationCount() + 1

    # Return 204 if no matching inventory was found
    if newInv.networkCount() == 0:
        utils.writeTS(req,
                      HTTP.renderErrorPage(req, http.NO_CONTENT,
                                           "No matching inventory found",
                                           ro))
        return False

    # Copy references (dataloggers, responses, sensors)
    decCount, resCount = 0, 0
    if ro.includeCha:
        decCount = self._copyReferences(newInv, req, objCount, inv, ro,
                                        dataloggers, sensors)
        if decCount is None:
            return False
        else:
            resCount = newInv.responsePAZCount() + \
                       newInv.responseFIRCount() + \
                       newInv.responsePolynomialCount()
        objCount += resCount + decCount + newInv.dataloggerCount() + \
                    newInv.sensorCount()

    req.setHeader("Content-Type", "application/xml")
    sink = utils.Sink(req)
    if not exp.write(sink, newInv):
        return False

    Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
                   "%iDL, %iDec, %iSen, %iRes (total objects/bytes: " \
                   "%i/%i) " % (ro.service, newInv.networkCount(), staCount,
                   locCount, chaCount, newInv.dataloggerCount(), decCount,
                   newInv.sensorCount(), resCount, objCount, sink.written))
    utils.accessLog(req, ro, http.OK, sink.written, None)
    return True