Example #1
	def resumeProducing(self):
		rec = None

		try: rec = self.rsInput.next()
		except Exception, e: Logging.warning("%s" % str(e))

		if self.written == 0:
			# read first record to test if any data exists at all
			if not rec:
				msg = "no waveform data found"
				data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro)
				if data:
					self.req.write(data)
				self.req.unregisterProducer()
				self.req.finish()
				return

			self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
			self.req.setHeader('Content-Disposition', "attachment; " \
			                   "filename=%s" % self.fileName)

		if not rec:
			self.req.unregisterProducer()
			Logging.notice("%s: returned %i bytes of mseed data" % (
			               self.ro.service, self.written))
			utils.accessLog(self.req, self.ro, http.OK, self.written, None)
			self.req.finish()
			return

		data = rec.raw().str()
		self.req.write(data)
		self.written += len(data)
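The method above is the resumeProducing() hook of a Twisted pull producer: the request calls it whenever the transport can accept more data, and the producer either writes the next miniSEED record or unregisters itself and finishes the request. Below is a minimal, hypothetical sketch of the same pattern outside fdsnws; RecordProducer, Download and the chunk list are illustrative names, not part of the original code.

from zope.interface import implementer
from twisted.internet.interfaces import IPullProducer
from twisted.web import resource, server


@implementer(IPullProducer)
class RecordProducer(object):
    # writes exactly one chunk per resumeProducing() call
    def __init__(self, req, chunks):
        self.req = req
        self.chunks = iter(chunks)

    def resumeProducing(self):
        try:
            self.req.write(next(self.chunks))
        except StopIteration:
            self.req.unregisterProducer()
            self.req.finish()

    def stopProducing(self):
        pass


class Download(resource.Resource):
    isLeaf = True

    def render_GET(self, req):
        # streaming=False registers a pull producer: Twisted keeps calling
        # resumeProducing() until unregisterProducer() is called
        req.registerProducer(RecordProducer(req, ['chunk1\n', 'chunk2\n']), False)
        return server.NOT_DONE_YET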
Example #2
	def _loadInventory(self):
		Logging.notice("loading inventory")
		dbr = DataModel.DatabaseReader(self.database())
		self._inv = DataModel.Inventory()

		# Load networks and stations
		staCount = 0
		for i in xrange(dbr.loadNetworks(self._inv)):
			staCount += dbr.load(self._inv.network(i))
		Logging.debug("loaded %i stations from %i networks" % (
		              staCount, self._inv.networkCount()))

		# Load sensors, skip calibrations (not needed by StationXML exporter)
		Logging.debug("loaded %i sensors" % dbr.loadSensors(self._inv))

		# Load datalogger and its decimations, skip calibrations (not needed by
		# StationXML exporter)
		deciCount = 0
		for i in xrange(dbr.loadDataloggers(self._inv)):
			deciCount += dbr.loadDecimations(self._inv.datalogger(i))
		Logging.debug("loaded %i decimations from %i dataloggers" % (
		              deciCount, self._inv.dataloggerCount()))

		# Load responses
		resPAZCount = dbr.loadResponsePAZs(self._inv)
		resFIRCount = dbr.loadResponseFIRs(self._inv)
		resPolCount = dbr.loadResponsePolynomials(self._inv)
		resCount = resPAZCount + resFIRCount + resPolCount
		Logging.debug("loaded %i responses (PAZ: %i, FIR: %i, Poly: %i)" % (
		              resCount, resPAZCount, resFIRCount, resPolCount))
		Logging.info("inventory loaded")
Example #3
	def addUser(self, name, attributes, expires, data):
		try:
			password = self.__users[name][0]

		except KeyError:
			bl = " (blacklisted)" if name in self.__blacklist else ""
			Logging.notice("registering %s%s %s" % (name, bl, data))
			password = base64.urlsafe_b64encode(os.urandom(12))

		attributes['blacklisted'] = name in self.__blacklist
		self.__users[name] = (password, attributes, expires)
		return password
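In the KeyError branch above a new password is derived from 12 random bytes, URL-safe Base64 encoded; 12 is divisible by 3, so the result is always a 16-character token without padding. A standalone illustration (on Python 3, urlsafe_b64encode returns bytes and would need .decode('ascii')):

import base64
import os

password = base64.urlsafe_b64encode(os.urandom(12))
print(len(password))  # 16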
Example #4
	def run(self):
		modeStr = None
		if self._evaluationMode is not None:
			modeStr = DataModel.EEvaluationModeNames.name(self._evaluationMode)
		whitelistStr = "<None>"
		if self._eventTypeWhitelist is not None:
			whitelistStr = ", ".join(self._eventTypeWhitelist)
		blacklistStr = "<None>"
		if self._eventTypeBlacklist is not None:
			blacklistStr = ", ".join(self._eventTypeBlacklist)
		Logging.notice("\n" \
		               "configuration read:\n" \
		               "  serve\n" \
		               "    dataselect    : %s\n" \
		               "    event         : %s\n" \
		               "    station       : %s\n" \
		               "  listenAddress   : %s\n" \
		               "  port            : %i\n" \
		               "  connections     : %i\n" \
		               "  htpasswd        : %s\n" \
		               "  accessLog       : %s\n" \
		               "  queryObjects    : %i\n" \
		               "  realtimeGap     : %s\n" \
		               "  samples (M)     : %s\n" \
		               "  allowRestricted : %s\n" \
		               "  hideAuthor      : %s\n" \
		               "  evaluationMode  : %s\n" \
		               "  eventType\n" \
		               "    whitelist     : %s\n" \
		               "    blacklist     : %s\n" % (
		               self._serveDataSelect, self._serveEvent,
		               self._serveStation, self._listenAddress, self._port,
		               self._connections, self._htpasswd, self._accessLogFile,
		               self._queryObjects, self._realtimeGap, self._samplesM,
		               self._allowRestricted, self._hideAuthor, modeStr,
		               whitelistStr, blacklistStr))

		if not self._serveDataSelect and not self._serveEvent and \
		   not self._serveStation:
			Logging.error("all services disabled through configuration")
			return False

		# access logger if requested
		if self._accessLogFile:
			self._accessLog = Log(self._accessLogFile)

		# load inventory needed by DataSelect and Station service
		if self._serveDataSelect or self._serveStation:
			self._loadInventory()

		DataModel.PublicObject.SetRegistrationEnabled(False)

		shareDir = os.path.join(Environment.Instance().shareDir(), 'fdsnws')

		# Overwrite/set the MIME type of *.wadl and *.xml documents: instead of
		# the official types defined in /etc/mime.types, 'application/xml' is
		# used as required by the FDSNWS specification.
		static.File.contentTypes['.wadl'] = 'application/xml'
		static.File.contentTypes['.xml'] = 'application/xml'

		# create resource tree /fdsnws/...
		root = ListingResource()

		fileName = os.path.join(shareDir, 'favicon.ico')
		fileRes = static.File(fileName, 'image/x-icon')
		fileRes.childNotFound = NoResource()
		fileRes.isLeaf = True
		root.putChild('favicon.ico', fileRes)

		prefix = ListingResource()
		root.putChild('fdsnws', prefix)

		# right now service version is shared by all services
		serviceVersion = ServiceVersion()

		# dataselect
		if self._serveDataSelect:
			dataselect = ListingResource()
			prefix.putChild('dataselect', dataselect)
			dataselect1 = DirectoryResource(os.path.join(shareDir, 'dataselect.html'))
			dataselect.putChild('1', dataselect1)

			dataselect1.putChild('query', FDSNDataSelect())
			msg = 'authorization for restricted time series data required'
			authSession = self._getAuthSessionWrapper(FDSNDataSelectRealm(), msg)
			dataselect1.putChild('queryauth', authSession)
			dataselect1.putChild('version', serviceVersion)
			fileRes = static.File(os.path.join(shareDir, 'dataselect.wadl'))
			fileRes.childNotFound = NoResource()
			dataselect1.putChild('application.wadl', fileRes)

		# event
		if self._serveEvent:
			event = ListingResource()
			prefix.putChild('event', event)
			event1 = DirectoryResource(os.path.join(shareDir, 'event.html'))
			event.putChild('1', event1)

			event1.putChild('query', FDSNEvent(self._hideAuthor,
			                                   self._evaluationMode,
			                                   self._eventTypeWhitelist,
			                                   self._eventTypeBlacklist))
			fileRes = static.File(os.path.join(shareDir, 'catalogs.xml'))
			fileRes.childNotFound = NoResource()
			event1.putChild('catalogs', fileRes)
			fileRes = static.File(os.path.join(shareDir, 'contributors.xml'))
			fileRes.childNotFound = NoResource()
			event1.putChild('contributors', fileRes)
			event1.putChild('version', serviceVersion)
			fileRes = static.File(os.path.join(shareDir, 'event.wadl'))
			fileRes.childNotFound = NoResource()
			event1.putChild('application.wadl', fileRes)

		# station
		if self._serveStation:
			station = ListingResource()
			prefix.putChild('station', station)
			station1 = DirectoryResource(os.path.join(shareDir, 'station.html'))
			station.putChild('1', station1)

			station1.putChild('query', FDSNStation(self._inv, self._allowRestricted, self._queryObjects))
			station1.putChild('version', serviceVersion)
			fileRes = static.File(os.path.join(shareDir, 'station.wadl'))
			fileRes.childNotFound = NoResource()
			station1.putChild('application.wadl', fileRes)

		retn = False
		try:
			# start listening for incoming requests
			reactor.listenTCP(self._port, Site(root), self._connections,
			                  self._listenAddress)

			# start processing
			Logging.info("start listening")
			log.addObserver(logSC3)

			reactor.run()
			retn = True
		except Exception, e:
			Logging.error(str(e))
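The service setup above follows the standard Twisted pattern: a tree of resources assembled with putChild(), wrapped in a Site and bound via reactor.listenTCP(). A stripped-down, hypothetical sketch of that structure without any SeisComP dependencies; directory, port and resource names are placeholders, and the str keys passed to putChild() match the Python 2 Twisted usage shown above (Python 3 Twisted expects bytes).

import os

from twisted.web import resource, server, static
from twisted.internet import reactor

shareDir = '/usr/local/share/myservice'  # placeholder directory

root = resource.Resource()
prefix = resource.Resource()
root.putChild('fdsnws', prefix)

# serve a WADL document as application/xml, mirroring the contentTypes
# override in the code above
static.File.contentTypes['.wadl'] = 'application/xml'
prefix.putChild('application.wadl',
                static.File(os.path.join(shareDir, 'dataselect.wadl')))

reactor.listenTCP(8080, server.Site(root), 50, '127.0.0.1')
reactor.run()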
Example #5
				self.shared     = None
				self.netClass   = None
				self.archive    = None

		# read filter configuration from INI file
		filter = []
		includeRuleDefined = False
		try:
			import ConfigParser
		except ImportError, ie:
			Logging.error("could not load 'ConfigParser' Python module")
			return False

		try:
			cp = ConfigParser.ConfigParser()
			Logging.notice("reading inventory filter file: %s" % fileName)
			cp.readfp(open(fileName, 'r'))
			if len(cp.sections()) == 0:
				return True

			# check for mandatory code attribute
			for sectionName in cp.sections():
				code = ""
				try:
					code = cp.get(sectionName, "code")
				except:
					Logging.error("missing 'code' attribute in section %s of " \
					              "inventory filter file %s" % (
					              sectionName, fileName))
					return False
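The filter configuration read above is a plain INI file in which every section has to define a code attribute; this excerpt shows no other attributes. A hypothetical minimal file and a self-contained check in the same spirit:

# rules.ini (hypothetical content; only 'code' is known to be mandatory here)
#
#   [exclude-test-networks]
#   code = XX.*

try:
    import ConfigParser as configparser  # Python 2, as imported above
except ImportError:
    import configparser                  # Python 3

cp = configparser.ConfigParser()
cp.read('rules.ini')
for sectionName in cp.sections():
    if not cp.has_option(sectionName, 'code'):
        raise ValueError("missing 'code' attribute in section %s" % sectionName)
    print("%s: %s" % (sectionName, cp.get(sectionName, 'code')))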
Example #6
	def _processRequest(self, req, ro, dbq, exp):
		if req._disconnected:
			return False

		DataModel.PublicObject.SetRegistrationEnabled(False)
		maxObj = Application.Instance()._queryObjects

		# query event(s)
		ep = DataModel.EventParameters()
		if ro.eventIDs:
			for eID in ro.eventIDs:
				event = dbq.getEventByPublicID(eID)
				event = DataModel.Event.Cast(event)
				if event:
					ep.add(event)
		else:
			self._findEvents(ep, ro, dbq)

		if ep.eventCount() == 0:
			msg = "No matching events found"
			utils.writeTS(req,
			              HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
			return False

		objCount = ep.eventCount()
		Logging.debug("events found: %i" % objCount)
		if not HTTP.checkObjects(req, objCount, maxObj): return False

		pickIDs = set()

		# add related information
		for iEvent in xrange(ep.eventCount()):
			e = ep.event(iEvent)
			eID = e.publicID()

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				objCount += dbq.loadComments(e)
			if not HTTP.checkObjects(req, objCount, maxObj): return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
				elif oRef.originID() == e.preferredOriginID():
					e.add(oRef)
					dbIter.close()
			objCount += e.originReferenceCount()
			# TODO: load FocalMechanismReferences???
			if not HTTP.checkObjects(req, objCount, maxObj): return False

			# origins
			for iORef in xrange(e.originReferenceCount()):
				oID = e.originReference(iORef).originID()
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1

				# comments
				if ro.comments:
					objCount += dbq.loadComments(o)
				if not HTTP.checkObjects(req, objCount, maxObj): return False

				# magnitudes
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() == e.preferredMagnitudeID():
						o.add(mag)
						dbIter.close()
				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += dbq.loadComments(o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj): return False

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj): return False

		# picks
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj): return False
			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					ep.add(pick)


		if ro.output == "csv":
			req.setHeader("Content-Type", "text/plain")
		else:
			req.setHeader("Content-Type", "application/xml")
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False

		Logging.notice("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Example #7
	def _processRequest(self, req, ro, exp):
		if req._disconnected:
			return False

		maxObj = Application.Instance()._queryObjects
		staCount, chaCount, locCount, objCount = 0, 0, 0, 0

		DataModel.PublicObject.SetRegistrationEnabled(False)
		inv = Application.Instance()._inv
		newInv = DataModel.Inventory()
		filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)

		# iterate over inventory networks
		for net in utils.networkIter(inv, ro):
			if not utils.checkObjects(req, objCount, maxObj): return False
			newNet = DataModel.Network(net)

			# iterate over inventory stations of current network
			for sta in utils.stationIter(net, ro):
				if ro.includeCha:
					numCha, numLoc = self._processStation(newNet, sta, ro)
					if numCha > 0:
						chaCount += numCha
						locCount += numLoc
						objCount += numCha + numLoc
						if not utils.checkObjects(req, objCount, maxObj):
							return False
				elif self._matchStation(sta, ro):
					if ro.includeSta:
						newNet.add(DataModel.Station(sta))
					else:
						# no station output requested: one matching station is
						# sufficient to include the network
						newInv.add(newNet)
						objCount += 1
						break

			if newNet.stationCount() > 0:
				newInv.add(newNet)
				staCount += newNet.stationCount()
				objCount += staCount + 1

		# Return 404 if no matching station was found
		if newInv.networkCount() == 0:
			utils.writeTS(req, HTTP.renderErrorPage(req, http.NOT_FOUND,
			                                        "No stations found"))
			return False

		# Copy references (if object limit allows to do so)
		if ro.includeCha:
			objCount += self._chaLevelCount
		if ro.includeRes:
			objCount += self._resLevelCount
		if not utils.checkObjects(req, objCount, maxObj): return False
		self._copyReferences(newInv, inv, ro)

		req.setHeader("Content-Type", "text/xml")
		if not exp.write(utils.Sink(req), newInv):
			return False

		Logging.notice("WS-Station: Returned %i networks, %i stations and %i " \
		               "streams (total objects: %i)" % (newInv.networkCount(),
		               staCount, chaCount, objCount))
		return True
Example #8
    def _processRequestExp(self, req, ro, exp):
        if req._disconnected:
            return False

        staCount, locCount, chaCount, objCount = 0, 0, 0, 0

        DataModel.PublicObject.SetRegistrationEnabled(False)
        newInv = DataModel.Inventory()
        dataloggers, sensors = set(), set()

        skipRestricted = not self._allowRestricted or (ro.restricted is not None and not ro.restricted)
        levelNet = not ro.includeSta
        levelSta = ro.includeSta and not ro.includeCha

        # iterate over inventory networks
        for net in ro.networkIter(self._inv, levelNet):
            if req._disconnected:
                return False
            if skipRestricted and utils.isRestricted(net):
                continue
            newNet = DataModel.Network(net)

            # iterate over inventory stations of current network
            for sta in ro.stationIter(net, levelSta):
                if req._disconnected:
                    return False
                if skipRestricted and utils.isRestricted(sta):
                    continue
                if not HTTP.checkObjects(req, objCount, self._maxObj):
                    return False
                if ro.includeCha:
                    numCha, numLoc, d, s = self._processStation(newNet, sta, ro, skipRestricted)
                    if numCha > 0:
                        locCount += numLoc
                        chaCount += numCha
                        objCount += numLoc + numCha
                        if not HTTP.checkObjects(req, objCount, self._maxObj):
                            return False
                        dataloggers |= d
                        sensors |= s
                elif self._matchStation(sta, ro):
                    if ro.includeSta:
                        newNet.add(DataModel.Station(sta))
                    else:
                        # no station output requested: one matching station
                        # is sufficient to include the network
                        newInv.add(newNet)
                        objCount += 1
                        break

            if newNet.stationCount() > 0:
                newInv.add(newNet)
                staCount += newNet.stationCount()
                objCount += staCount + 1

        # Return 204 if no matching inventory was found
        if newInv.networkCount() == 0:
            msg = "no matching inventory found"
            data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
            if data:
                utils.writeTS(req, data)
            return True

        # Copy references (dataloggers, responses, sensors)
        decCount, resCount = 0, 0
        if ro.includeCha:
            decCount = self._copyReferences(newInv, req, objCount, self._inv, ro, dataloggers, sensors, self._maxObj)
            if decCount is None:
                return False
            else:
                resCount = newInv.responsePAZCount() + newInv.responseFIRCount() + newInv.responsePolynomialCount()
                objCount += resCount + decCount + newInv.dataloggerCount() + newInv.sensorCount()

        sink = utils.Sink(req)
        if not exp.write(sink, newInv):
            return False

        Logging.notice(
            "%s: returned %iNet, %iSta, %iLoc, %iCha, "
            "%iDL, %iDec, %iSen, %iRes (total objects/bytes: "
            "%i/%i) "
            % (
                ro.service,
                newInv.networkCount(),
                staCount,
                locCount,
                chaCount,
                newInv.dataloggerCount(),
                decCount,
                newInv.sensorCount(),
                resCount,
                objCount,
                sink.written,
            )
        )
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
Example #9
                                sr,
                                stream.start().toString(df),
                                end,
                            )
                            lineCount += 1

        # Return 204 if no matching inventory was found
        if lineCount == 0:
            msg = "no matching inventory found"
            data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
            if data:
                utils.writeTS(req, data)
            return False

        utils.writeTS(req, data)
        Logging.notice("%s: returned %i lines (total bytes: %i)" % (ro.service, lineCount, len(data)))
        utils.accessLog(req, ro, http.OK, len(data), None)
        return True

    # ---------------------------------------------------------------------------
    # Checks if at least one location and channel combination matches the
    # request options
    @staticmethod
    def _matchStation(sta, ro):
        # No filter: return true immediately
        if not ro.channel or (not ro.channel.loc and not ro.channel.cha):
            return True

        for loc in ro.locationIter(sta, False):
            if not ro.channel.cha and not ro.time:
Example #10
	def _processRequestText(self, req, ro, dbq, ep):
		lineCount = 0

		line = "#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|" \
		       "Contributor|ContributorID|MagType|Magnitude|MagAuthor|" \
		       "EventLocationName\n"
		df = "%FT%T.%f"
		req.write(line)
		byteCount = len(line)

		# add related information
		for iEvent in xrange(ep.eventCount()):
			e = ep.event(iEvent)
			eID = e.publicID()

			# query for preferred origin
			obj = dbq.getObject(DataModel.Origin.TypeInfo(),
			                    e.preferredOriginID())
			o = DataModel.Origin.Cast(obj)
			if o is None:
				Logging.warning("preferred origin of event '%s' not found: %s" % (
				                eID, e.preferredOriginID()))
				continue

			# depth
			try: depth = str(o.depth().value())
			except ValueException: depth = ''

			# author
			try: author = o.creationInfo().author()
			except ValueException: author = ''

			# contributor
			try: contrib = e.creationInfo().agencyID()
			except ValueException: contrib = ''

			# query for preferred magnitude (if any)
			mType, mVal, mAuthor = '', '', ''
			if e.preferredMagnitudeID():
				obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
				                    e.preferredMagnitudeID())
				m = DataModel.Magnitude.Cast(obj)
				if m is not None:
					mType = m.type()
					mVal = str(m.magnitude().value())
					try: mAuthor = m.creationInfo().author()
					except ValueException: pass

			# event description
			dbq.loadEventDescriptions(e)
			region = ''
			for i in xrange(e.eventDescriptionCount()):
				ed = e.eventDescription(i)
				if ed.type() == DataModel.REGION_NAME:
					region = ed.text()
					break

			if req._disconnected:
				return False
			line = "%s|%s|%f|%f|%s|%s||%s|%s|%s|%s|%s|%s\n" % (
			       eID, o.time().value().toString(df), o.latitude().value(),
			       o.longitude().value(), depth, author, contrib, eID,
			       mType, mVal, mAuthor, region)
			req.write(line)
			lineCount += 1
			byteCount += len(line)

		# write response
		Logging.notice("%s: returned %i events (total bytes: %i) " % (
		               ro.service, lineCount, byteCount))
		utils.accessLog(req, ro, http.OK, byteCount, None)
		return True
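Each event is written above as one pipe-separated line with the columns named in the header. A client-side sketch that splits such a line back into a dictionary; the sample line is hypothetical:

header = ("#EventID|Time|Latitude|Longitude|Depth/km|Author|Catalog|"
          "Contributor|ContributorID|MagType|Magnitude|MagAuthor|"
          "EventLocationName")
sample = ("gfz2013abcd|2013-01-01T00:00:00.000000|10.0|20.0|12.3|GFZ||"
          "GFZ|gfz2013abcd|mb|4.5|GFZ|Somewhere")

columns = header.lstrip('#').split('|')
row = dict(zip(columns, sample.rstrip('\n').split('|')))
print("%s %s %s" % (row['EventID'], row['Time'], row['Magnitude']))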
Example #11
	def _processRequestExp(self, req, ro, dbq, exp, ep):
		objCount = ep.eventCount()
		maxObj = Application.Instance()._queryObjects

		if not HTTP.checkObjects(req, objCount, maxObj):
			return False

		pickIDs = set()
		if ro.picks is None:
			ro.picks = True

		# add related information
		for iEvent in xrange(ep.eventCount()):
			if req._disconnected:
				return False
			e = ep.event(iEvent)
			if self._hideAuthor:
				self._removeAuthor(e)

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				objCount += self._loadComment(dbq, e)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
				elif oRef.originID() == e.preferredOriginID():
					e.add(oRef)
					dbIter.close()
				# TODO: if focal mechanisms are added make sure derived
				# origin is loaded

			objCount += e.originReferenceCount()

			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# TODO: add focal mechanisms

			# origins
			for iORef in xrange(e.originReferenceCount()):
				if req._disconnected:
					return False
				oID = e.originReference(iORef).originID()
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(o)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, o)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# magnitudes
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() == e.preferredMagnitudeID():
						o.add(mag)
						dbIter.close()

					if self._hideAuthor:
						self._removeAuthor(mag)

				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += self._loadComments(dbq, o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# TODO station magnitudes, amplitudes
				# - added pick id for each pick referenced by amplitude

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)
					if self._hideAuthor:
						for iArrival in xrange(o.arrivalCount()):
							self._removeAuthor(o.arrival(iArrival))

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# picks
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					if self._hideAuthor:
						self._removeAuthor(pick)
					if ro.comments:
						objCount += self._loadComments(dbq, pick)
					ep.add(pick)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# write response
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False
		Logging.notice("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Example #12
	def _processRequest(self, req, ro):
		if req._disconnected:
			return False
		xmlOutput = ro.output == "xml"
		if xmlOutput:
			req.setHeader("Content-Type", "text/xml")
			utils.writeTS(req, SX_MSG_HEAD % Time.GMT().toString(TIME_FMT))
		else:
			req.setHeader("Content-Type", "text/plain")
			if ro.output == "query":
				respLine = "net=%s&sta=%s&loc=%s&cha=%s&start=%s&end=%s\n"
			else:
				respLine = "%s %s %s %s %s %s\n"

		streamCount = 0

		# build list of years dependent on start and end time
		years = self._getYearDirs(ro)

		# iterate over inventory networks
		for net in utils.networkIter(Application.Instance()._inv, ro):
			netCode = net.code()

			# iterate over inventory stations of current network
			for sta in utils.stationIter(net, ro):
				if req._disconnected:
					return False

				# ro.restricted: True means include all stations, False only non-restricted ones
				if not ro.restricted:
					try: restricted = sta.restricted()
					except: continue
					if restricted:
						continue

				staCode = sta.code()
				xmlStaHeadWritten = False
				streams = self._getStreams(ro, years, netCode, staCode)

				if xmlOutput:
					if len(streams) == 0:
						continue
					utils.writeTS(req, SX_STA_HEAD % (netCode, staCode))
					try: utils.writeTS(req, SX_STA_LAT % sta.latitude())
					except: pass
					try: utils.writeTS(req, SX_STA_LON % sta.longitude())
					except: pass
					try: utils.writeTS(req, SX_STA_ELE % sta.elevation())
					except: pass

				for stream in streams:
					if xmlOutput:
						utils.writeTS(req, SX_CHA % (stream[1], stream[0],
						                   stream[2].toString(TIME_FMT),
						                   stream[3].toString(TIME_FMT)))
					else:
						loc = stream[0] if len(stream[0]) > 0 else "--"
						utils.writeTS(req, respLine % (netCode, staCode, loc,
						              stream[1], stream[2].toString(TIME_FMT),
						              stream[3].toString(TIME_FMT)))

				streamCount += len(streams)

				# end of station: close Station element
				if xmlOutput:
					utils.writeTS(req, SX_STA_FOOT)
		# end of all networks: close StaMessage element
		if xmlOutput:
			utils.writeTS(req, SX_MSG_FOOT)

		Logging.notice("WS-Availability: Returned %i streams" % streamCount)
		return True
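In the plain-text 'query' output mode above, each stream is emitted as a URL query string (net=...&sta=...&loc=...&cha=...&start=...&end=...), so a returned line can be parsed with the standard library. A short sketch with a hypothetical line:

try:
    from urlparse import parse_qs       # Python 2
except ImportError:
    from urllib.parse import parse_qs   # Python 3

line = "net=GE&sta=APE&loc=&cha=BHZ&start=2013-01-01T00:00:00&end=2013-01-02T00:00:00"
params = dict((k, v[0]) for k, v in parse_qs(line, keep_blank_values=True).items())
print("%s.%s.%s.%s" % (params['net'], params['sta'], params['loc'], params['cha']))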
Example #13
	def _processRequest(self, req, ro, exp):
		if req._disconnected:
			return False

		DataModel.PublicObject.SetRegistrationEnabled(False)
		maxObj = Application.Instance()._queryObjects
		staCount, locCount, chaCount, objCount = 0, 0, 0, 0

		inv = Application.Instance()._inv
		newInv = DataModel.Inventory()
		filterChannel = ro.channel and (ro.channel.loc or ro.channel.cha)
		dataloggers, sensors = set(), set()

		# iterate over inventory networks
		for net in utils.networkIter(inv, ro):
			if not ro.restricted and net.restricted(): continue
			newNet = DataModel.Network(net)

			# iterate over inventory stations of current network
			for sta in utils.stationIter(net, ro, matchGeo=True):
				if not ro.restricted and sta.restricted(): continue
				if not HTTP.checkObjects(req, objCount, maxObj): return False
				if ro.includeCha:
					numCha, numLoc, d, s = self._processStation(newNet, sta, ro)
					if numCha > 0:
						locCount += numLoc
						chaCount += numCha
						objCount += numLoc + numCha
						if not HTTP.checkObjects(req, objCount, maxObj):
							return False
						dataloggers |= d
						sensors |= s
				elif self._matchStation(sta, ro):
					if ro.includeSta:
						newNet.add(DataModel.Station(sta))
					else:
						# no station output requested: one matching station is
						# sufficient to include the network
						newInv.add(newNet)
						objCount += 1
						break

			if newNet.stationCount() > 0:
				newInv.add(newNet)
				staCount += newNet.stationCount()
				objCount += staCount + 1

		# Return 204 if no matching inventory was found
		if newInv.networkCount() == 0:
			utils.writeTS(req, HTTP.renderErrorPage(req, http.NO_CONTENT,
			              "No matching inventory found", ro))
			return False

		# Copy references (dataloggers, responses, sensors)
		decCount, resCount = 0, 0
		if ro.includeCha:
			decCount = self._copyReferences(newInv, req, objCount, inv, ro,
			           dataloggers, sensors)
			if decCount is None:
				return False
			else:
				resCount = newInv.responsePAZCount() + \
				           newInv.responseFIRCount() + \
				           newInv.responsePolynomialCount()
				objCount += resCount + decCount + newInv.dataloggerCount() + \
				            newInv.sensorCount()

		req.setHeader("Content-Type", "application/xml")
		sink = utils.Sink(req)
		if not exp.write(sink, newInv):
			return False

		Logging.notice("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
		               "%iDL, %iDec, %iSen, %iRes (total objects/bytes: " \
		               "%i/%i) " % (ro.service, newInv.networkCount(), staCount,
		               locCount, chaCount, newInv.dataloggerCount(), decCount,
		               newInv.sensorCount(), resCount, objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True