Пример #1
0
class FDSNEvent(resource.Resource):
    """FDSN-WS event resource.

    Validates the request options, sets up an exporter and a database
    query object, then defers the actual event query to a worker thread.

    Parameters
    ----------
    hideAuthor : bool
        If True, author information is stripped from returned objects.
    evaluationMode : optional
        If set, restrict results to origins with this evaluation mode.
    eventTypeWhitelist, eventTypeBlacklist : optional
        Event type filters applied to the query.
    """
    isLeaf = True

    #---------------------------------------------------------------------------
    def __init__(self,
                 hideAuthor=False,
                 evaluationMode=None,
                 eventTypeWhitelist=None,
                 eventTypeBlacklist=None):
        self._hideAuthor = hideAuthor
        self._evaluationMode = evaluationMode
        self._eventTypeWhitelist = eventTypeWhitelist
        self._eventTypeBlacklist = eventTypeBlacklist

    #---------------------------------------------------------------------------
    def render_GET(self, req):
        """Handle a GET request; the response is produced asynchronously."""
        # Parse and validate GET parameters
        ro = _EventRequestOptions(req.args)
        try:
            ro.parse()
        except ValueError as e:  # "as" syntax works on Python 2.6+ and 3
            Logging.warning(str(e))
            return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        # Catalog filter is not supported, any filter value will result in 204
        if ro.catalogs:
            msg = "no matching events found"
            return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # updateafter not implemented
        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            exp = None
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp:
                exp.setFormattedOutput(bool(ro.formatted))
            else:
                # typo fixed: "no available" -> "not available"
                msg = "output format '%s' not available, export module '%s' could " \
                      "not be loaded." % (ro.format, ro.Exporters[ro.format])
                return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg,
                                            ro)

        # Create database query
        dbq = DataModel.DatabaseQuery(Application.Instance().database())
        if dbq.hasError():
            msg = "could not connect to database: %s" % dbq.errorMsg()
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # Process request in separate thread
        d = deferToThread(self._processRequest, req, ro, dbq, exp)
        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Пример #2
0
 def __init__(self, inv, bufferSize, access=None, user=None):
     """Initialize the resource.

     inv: inventory object, matched against requested streams
         (presumably a DataModel inventory — confirm against caller).
     bufferSize: buffer size handed to the record stream.
     access: optional access-control object used for authorization.
     user: optional authenticated user identity.
     """
     BaseResource.__init__(self, VERSION)
     # RecordStream source URL comes from the application configuration.
     self._rsURL = Application.Instance().recordStreamURL()
     self.__inv = inv
     self.__access = access
     self.__user = user
     self.__bufferSize = bufferSize
Пример #3
0
    def render_GET(self, req):
        """Handle an FDSN event GET request.

        Validates the request options, rejects unsupported filters and
        formats, opens the database and defers the actual processing to
        a worker thread. The response itself is produced asynchronously.
        """
        opts = _EventRequestOptions()

        # Parse and validate the GET parameters.
        try:
            opts.parseGET(req.args)
            opts.parse()
        except ValueError as err:
            Logging.warning(str(err))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(err), opts)

        # Reject options this service does not implement.
        if opts.catalogs:
            return self.renderErrorPage(
                req, http.BAD_REQUEST, "catalog filter not supported", opts)

        if opts.comments and self._hideComments:
            return self.renderErrorPage(
                req, http.BAD_REQUEST, "including of comments not supported",
                opts)

        if opts.updatedAfter:
            return self.renderErrorPage(
                req, http.BAD_REQUEST,
                "filtering based on update time not supported", opts)

        if self._formatList is not None and opts.format not in self._formatList:
            return self.renderErrorPage(
                req, http.BAD_REQUEST,
                "output format '%s' not available" % opts.format, opts)

        # Create the exporter; 'None' selects plain text output.
        if opts.format in opts.VText:
            exporter = None
        else:
            exporter = Exporter.Create(opts.Exporters[opts.format])
            if exporter:
                exporter.setFormattedOutput(bool(opts.formatted))
            else:
                msg = "output format '%s' not available, export module '%s' could " \
                      "not be loaded." % (opts.format, opts.Exporters[opts.format])
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, opts)

        # Open the database and build a query object on it.
        handle = DatabaseInterface.Open(Application.Instance().databaseURI())
        if handle is None:
            return self.renderErrorPage(
                req, http.SERVICE_UNAVAILABLE, "could not connect to database",
                opts)

        query = DataModel.DatabaseQuery(handle)

        # Run the heavy lifting in a separate thread.
        deferred = deferToThread(self._processRequest, req, opts, query,
                                 exporter)
        req.notifyFinish().addErrback(utils.onCancel, deferred)
        deferred.addBoth(utils.onFinish, req)

        # The deferred object delivers the response.
        return server.NOT_DONE_YET
Пример #4
0
    def _prepareRequest(self, req, ro):
        """Validate station request options and start asynchronous processing.

        Rejects options not supported by the current configuration, loads
        the data-availability cache if needed, then defers either the text
        or the XML exporter path to a worker thread.

        Returns server.NOT_DONE_YET on success or a rendered error page.
        """
        if ro.availability and not self._daEnabled:
            msg = "including of availability information not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.matchTimeSeries and not self._daEnabled:
            msg = "filtering based on available time series not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # load data availability if requested
        dac = None
        if ro.availability or ro.matchTimeSeries:
            dac = Application.Instance().getDACache()
            if dac is None or len(dac.extents()) == 0:
                # typo fixed: "availabiltiy" -> "availability"
                msg = "no data availability extent information found"
                return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            if ro.includeRes:
                msg = "response level output not available in text format"
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
            req.setHeader('Content-Type', 'text/plain')
            d = deferToThread(self._processRequestText, req, ro, dac)
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp is None:
                # typo fixed: "no available" -> "not available"
                msg = "output format '%s' not available, export module '%s' " \
                      "could not be loaded." % (
                          ro.format, ro.Exporters[ro.format])
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

            req.setHeader('Content-Type', 'application/xml')
            exp.setFormattedOutput(bool(ro.formatted))
            d = deferToThread(self._processRequestExp, req, ro, exp, dac)

        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Пример #5
0
    def _processRequest(self, req, ro):
        """Process a dataselect (waveform) request.

        Validates unsupported options, resolves the requested streams
        against the inventory, enforces access restrictions and the
        maximum-sample limit, then streams matching records back through
        a producer. Returns server.NOT_DONE_YET on success or a rendered
        error page.
        """

        # Only quality codes 'B' and 'M' are accepted by this service.
        if ro.quality != 'B' and ro.quality != 'M':
            msg = "quality other than 'B' or 'M' not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        # Optional global sample budget: _samplesM is configured in
        # millions of samples.
        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
            samples = 0

        trackerList = []

        # Determine client IP and client ID only when some form of
        # request tracking/logging is enabled.
        if app._trackdbEnabled or app._requestLog:
            # Prefer the first entry of X-Forwarded-For (original client
            # behind a proxy), fall back to the socket peer address.
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            clientID = req.getHeader("User-Agent")
            if clientID:
                clientID = clientID[:80]
            else:
                clientID = "fdsnws"

        if app._trackdbEnabled:
            if ro.userName:
                userID = ro.userName
            else:
                userID = app._trackdbDefaultUser

            # Request ID: milliseconds since 2015-01-01 (epoch offset
            # 1420070400000 ms), prefixed with 'ws'.
            reqID = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
            tracker = RequestTrackerDB(clientID, app.connection(), reqID,
                                       "WAVEFORM", userID,
                                       "REQUEST WAVEFORM " + reqID, "fdsnws",
                                       userIP, req.getClientIP())

            trackerList.append(tracker)

        if app._requestLog:
            tracker = app._requestLog.tracker(ro.service, ro.userName, userIP,
                                              clientID)
            trackerList.append(tracker)

        # Open record stream
        rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

        # Tri-state access flag: None = no stream matched at all,
        # True = every matched stream was denied, False = at least one
        # stream was allowed. The idiom
        #   forbidden = forbidden or (forbidden is None)
        # latches True from None/True but keeps False once set.
        forbidden = None

        # Add request streams
        # iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                netRestricted = utils.isRestricted(net)
                # NOTE(review): restricted networks are skipped early only
                # when no tracker is active ("not trackerList") — presumably
                # so trackers can still record DENIED lines below; confirm
                # this asymmetry is intended.
                if not trackerList and netRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue
                for sta in self._stationIter(net, s):
                    staRestricted = utils.isRestricted(sta)
                    if not trackerList and staRestricted and not self.__user:
                        forbidden = forbidden or (forbidden is None)
                        continue
                    for loc in self._locationIter(sta, s):
                        for cha in self._streamIter(loc, s):
                            # Clip the requested window to the channel epoch;
                            # cha.end() may raise when the epoch is open.
                            start_time = max(cha.start(), s.time.start)

                            try:
                                end_time = min(cha.end(), s.time.end)
                            except ValueError:
                                end_time = s.time.end

                            # Deny restricted streams unless the user is
                            # authenticated and authorized for this window.
                            if (netRestricted or staRestricted
                                    or utils.isRestricted(cha)
                                ) and (not self.__user or
                                       (self.__access
                                        and not self.__access.authorize(
                                            self.__user, net.code(),
                                            sta.code(), loc.code(), cha.code(),
                                            start_time, end_time))):

                                for tracker in trackerList:
                                    # Temporary networks start with a digit
                                    # or X/Y/Z; others are permanent.
                                    net_class = 't' if net.code()[0] \
                                        in "0123456789XYZ" else 'p'
                                    tracker.line_status(
                                        start_time, end_time, net.code(),
                                        sta.code(), cha.code(), loc.code(),
                                        True, net_class, True, [], "fdsnws",
                                        "DENIED", 0, "")

                                forbidden = forbidden or (forbidden is None)
                                continue

                            forbidden = False

                            # enforce maximum sample per request restriction
                            if maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    msg = "skipping stream without sampling " \
                                          "rate definition: %s.%s.%s.%s" % (
                                              net.code(), sta.code(),
                                              loc.code(), cha.code())
                                    Logging.warning(msg)
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = "maximum number of %sM samples " \
                                          "exceeded" % str(app._samplesM)
                                    return self.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE,
                                        msg, ro)

                            Logging.debug(
                                "adding stream: %s.%s.%s.%s %s - %s" %
                                (net.code(), sta.code(), loc.code(),
                                 cha.code(), start_time.iso(), end_time.iso()))
                            rs.addStream(net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time, end_time,
                                         utils.isRestricted(cha),
                                         sta.archiveNetworkCode())

        # All matched streams were denied.
        if forbidden:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "DENIED", 0, "")
                tracker.request_status("END", "")

            msg = "access denied"
            return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

        # No stream matched the request at all.
        elif forbidden is None:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")

            msg = "no metadata found"
            return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # Build output filename
        fileName = Application.Instance()._fileNamePrefix.replace(
            "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Пример #6
0
    def _findEvents(self, ep, ro, dbq):
        """Build and execute the SQL query selecting matching events.

        Assembles a SELECT statement from the request options (time,
        region, depth, magnitude, event type, contributor, ordering,
        limit/offset) and adds each resulting Event to the event
        parameters container *ep*.

        NOTE(review): several request values (contributors, bounding-box
        coordinates, depth/magnitude limits, limit/offset) are
        interpolated directly into the SQL string. They are presumably
        validated/typed upstream by the request-options parser — confirm,
        otherwise this is injectable.
        """
        db = Application.Instance().database()

        # Map logical column names to backend-specific column names.
        def _T(name):
            return db.convertColumnName(name)

        # Format a time value for use in SQL literals.
        def _time(time):
            return db.timeToString(time)

        orderByMag = ro.orderBy and ro.orderBy.startswith('magnitude')
        reqMag = ro.mag or orderByMag
        reqDist = ro.geo and ro.geo.bCircle
        colPID = _T('publicID')
        colTime = _T('time_value')
        colMag = _T('magnitude_value')
        if orderByMag:
            colOrderBy = "m.%s" % colMag
        else:
            colOrderBy = "o.%s" % colTime

        # A circular region is first reduced to its bounding box; the
        # exact distance filter is applied in a subquery at the end.
        bBox = None
        if ro.geo:
            colLat, colLon = _T('latitude_value'), _T('longitude_value')
            if ro.geo.bBox:
                bBox = ro.geo.bBox
            else:
                bBox = ro.geo.bCircle.calculateBBox()

        # SELECT --------------------------------
        q = "SELECT DISTINCT pe.%s, e.*, %s" % (colPID, colOrderBy)
        if reqDist:  # Great circle distance calculated by Haversine formula
            c = ro.geo.bCircle
            q += ", DEGREES(ACOS(" \
                 "COS(RADIANS(o.%s)) * COS(RADIANS(%s)) * " \
                 "COS(RADIANS(o.%s) - RADIANS(%s)) + SIN(RADIANS(o.%s)) * " \
                 "SIN(RADIANS(%s)))) AS distance" % (
                     colLat, c.lat, colLon, c.lon, colLat, c.lat)

        # FROM ----------------------------------
        q += " FROM Event AS e, PublicObject AS pe" \
             ", Origin AS o, PublicObject AS po"
        if reqMag:
            q += ", Magnitude AS m, PublicObject AS pm"

        # WHERE ---------------------------------
        q += " WHERE e._oid = pe._oid"

        # event type white list filter, defined via configuration and/or request
        # parameters
        types = None
        if self._eventTypeWhitelist and ro.eventTypes:
            types = self._eventTypeWhitelist.intersection(ro.eventTypes)
            if not types:
                Logging.debug('all requested event types filtered by '
                              'configured event type white list')
                return
        elif self._eventTypeWhitelist:
            types = self._eventTypeWhitelist
        elif ro.eventTypes:
            types = ro.eventTypes
        if types is not None:
            # -1 encodes "events without a type" (NULL in the database).
            allowNull = -1 in types
            types = [x for x in types if x >= 0]

            etqIn = "e.%s IN ('%s')" % (_T('type'), "', '".join(
                DataModel.EEventTypeNames.name(x) for x in types))
            if allowNull:
                etqNull = "e.%s is NULL" % _T('type')
                if types:
                    q += " AND (%s OR %s)" % (etqNull, etqIn)
                else:
                    q += " AND %s" % etqNull
            else:
                q += " AND %s" % etqIn

        # event type black list filter, defined in configuration
        if self._eventTypeBlacklist:
            allowNull = -1 not in self._eventTypeBlacklist
            types = [x for x in self._eventTypeBlacklist if x >= 0]

            etqNotIn = "e.%s NOT IN ('%s')" % (_T('type'), "', '".join(
                DataModel.EEventTypeNames.name(x) for x in types))
            if allowNull:
                etqNull = "e.%s is NULL" % _T('type')
                if types:
                    q += " AND (%s OR %s)" % (etqNull, etqNotIn)
                else:
                    q += " AND %s" % etqNull
            else:
                q += " AND %s" % etqNotIn

        # event agency id filter
        if ro.contributors:
            q += " AND e.%s AND upper(e.%s) IN('%s')" % (
                _T('creationinfo_used'), _T('creationinfo_agencyid'),
                "', '".join(ro.contributors).upper())

        # origin information filter
        q += " AND o._oid = po._oid AND po.%s = e.%s" % (
            colPID, _T('preferredOriginID'))

        # evaluation mode config parameter
        if self._evaluationMode is not None:
            colEvalMode = _T('evaluationMode')
            q += " AND o.%s = '%s'" % (colEvalMode,
                                       DataModel.EEvaluationModeNames.name(
                                           self._evaluationMode))

        # time
        if ro.time:
            # Compare seconds and the separate microsecond column so that
            # sub-second bounds are honored.
            colTimeMS = _T('time_value_ms')
            if ro.time.start is not None:
                t = _time(ro.time.start)
                ms = ro.time.start.microseconds()
                q += " AND (o.%s > '%s' OR (o.%s = '%s' AND o.%s >= %i))" % (
                    colTime, t, colTime, t, colTimeMS, ms)
            if ro.time.end is not None:
                t = _time(ro.time.end)
                ms = ro.time.end.microseconds()
                q += " AND (o.%s < '%s' OR (o.%s = '%s' AND o.%s <= %i))" % (
                    colTime, t, colTime, t, colTimeMS, ms)

        # bounding box
        if bBox:
            if bBox.minLat is not None:
                q += " AND o.%s >= %s" % (colLat, bBox.minLat)
            if bBox.maxLat is not None:
                q += " AND o.%s <= %s" % (colLat, bBox.maxLat)
            if bBox.dateLineCrossing():
                q += " AND (o.%s >= %s OR o.%s <= %s)" % (colLon, bBox.minLon,
                                                          colLon, bBox.maxLon)
            else:
                if bBox.minLon is not None:
                    q += " AND o.%s >= %s" % (colLon, bBox.minLon)
                if bBox.maxLon is not None:
                    q += " AND o.%s <= %s" % (colLon, bBox.maxLon)

        # depth
        if ro.depth:
            q += " AND o.%s" % _T("depth_used")
            colDepth = _T('depth_value')
            if ro.depth.min is not None:
                q += " AND o.%s >= %s" % (colDepth, ro.depth.min)
            if ro.depth.max is not None:
                q += " AND o.%s <= %s" % (colDepth, ro.depth.max)

        # updated after
        if ro.updatedAfter:
            # Match if either the creation or the modification time is newer.
            t = _time(ro.updatedAfter)
            ms = ro.updatedAfter.microseconds()
            colCTime = _T('creationinfo_creationtime')
            colCTimeMS = _T('creationinfo_creationtime_ms')
            colMTime = _T('creationinfo_modificationtime')
            colMTimeMS = _T('creationinfo_modificationtime_ms')
            tFilter = "(o.%s > '%s' OR (o.%s = '%s' AND o.%s > %i))"

            q += " AND ("
            q += tFilter % (colCTime, t, colCTime, t, colCTimeMS, ms) + " OR "
            q += tFilter % (colMTime, t, colMTime, t, colMTimeMS, ms) + ")"

        # magnitude information filter
        if reqMag:
            q += " AND m._oid = pm._oid AND "
            if ro.mag and ro.mag.type:
                # join magnitude table on oID of origin and magnitude type
                q += "m._parent_oid = o._oid AND m.%s = '%s'" % (
                    _T('type'), dbq.toString(ro.mag.type))
            else:
                # join magnitude table on preferred magnitude id of event
                q += "pm.%s = e.%s" % (colPID, _T('preferredMagnitudeID'))

            if ro.mag and ro.mag.min is not None:
                q += " AND m.%s >= %s" % (colMag, ro.mag.min)
            if ro.mag and ro.mag.max is not None:
                q += " AND m.%s <= %s" % (colMag, ro.mag.max)

        # ORDER BY ------------------------------
        q += " ORDER BY %s" % colOrderBy
        if ro.orderBy and ro.orderBy.endswith('-asc'):
            q += " ASC"
        else:
            q += " DESC"

        # SUBQUERY distance (optional) ----------
        if reqDist:
            q = "SELECT * FROM (%s) AS subquery WHERE distance " % q
            c = ro.geo.bCircle
            if c.minRad is not None:
                q += ">= %s" % c.minRad
            if c.maxRad is not None:
                if c.minRad is not None:
                    q += " AND distance "
                q += "<= %s" % c.maxRad

        # LIMIT/OFFSET --------------------------
        if ro.limit is not None or ro.offset is not None:
            # Postgres allows to omit the LIMIT parameter for offsets, MySQL
            # does not. According to the MySQL manual a very large number should
            # be used for this case.
            l = DBMaxUInt
            if ro.limit is not None:
                l = ro.limit
            q += " LIMIT %i" % l
            if ro.offset is not None:
                q += " OFFSET %i" % ro.offset

        Logging.debug("event query: %s" % q)

        for e in dbq.getObjectIterator(q, DataModel.Event.TypeInfo()):
            ep.add(DataModel.Event.Cast(e))
0
    def _processRequestExp(self, req, ro, dbq, exp, ep):
        """Load related objects for each event and export the result.

        For every event in *ep*: event descriptions, comments, origin and
        focal mechanism references, the referenced focal mechanisms,
        origins, magnitudes, arrivals and picks are loaded from the
        database (subject to the request options) while enforcing the
        configured maximum object count. The populated container is then
        written to the client via *exp*.

        Returns True on success, False when the limit is exceeded, the
        client disconnected or the export failed.
        """
        objCount = ep.eventCount()
        maxObj = Application.Instance()._queryObjects

        if not self.checkObjects(req, objCount, maxObj):
            return False

        pickIDs = set()
        if ro.picks is None:
            ro.picks = True

        # add related information
        for iEvent in range(ep.eventCount()):
            if req._disconnected:
                return False
            e = ep.event(iEvent)
            if self._hideAuthor:
                self._removeAuthor(e)

            originIDs = set()
            magIDs = set()
            magIDs.add(e.preferredMagnitudeID())

            # eventDescriptions and comments
            objCount += dbq.loadEventDescriptions(e)
            if ro.comments:
                objCount += self._loadComments(dbq, e)
            if not self.checkObjects(req, objCount, maxObj):
                return False

            # origin references: either all or preferred only
            dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
            for obj in dbIter:
                oRef = DataModel.OriginReference.Cast(obj)
                if oRef is None:
                    continue
                if ro.allOrigins:
                    e.add(oRef)
                    originIDs.add(oRef.originID())
                elif oRef.originID() == e.preferredOriginID():
                    e.add(oRef)
                    originIDs.add(oRef.originID())
                    dbIter.close()

            objCount += e.originReferenceCount()

            # focalMechanism references: either none, preferred only or all
            if ro.fm or ro.allFMs:
                dbIter = dbq.getObjects(
                    e, DataModel.FocalMechanismReference.TypeInfo())
                for obj in dbIter:
                    fmRef = DataModel.FocalMechanismReference.Cast(obj)
                    if fmRef is None:
                        continue
                    if ro.allFMs:
                        e.add(fmRef)
                    elif fmRef.focalMechanismID(
                    ) == e.preferredFocalMechanismID():
                        e.add(fmRef)
                        dbIter.close()

            objCount += e.focalMechanismReferenceCount()

            if not self.checkObjects(req, objCount, maxObj):
                return False

            # focal mechanisms: process before origins to add derived origin to
            # originID list since it may be missing from origin reference list
            for iFMRef in range(e.focalMechanismReferenceCount()):
                if req._disconnected:
                    return False
                fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
                obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
                fm = DataModel.FocalMechanism.Cast(obj)
                if fm is None:
                    continue

                ep.add(fm)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(fm)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, fm)

                # momentTensors
                objCount += dbq.loadMomentTensors(fm)

                if not self.checkObjects(req, objCount, maxObj):
                    return False

                for iMT in range(fm.momentTensorCount()):
                    mt = fm.momentTensor(iMT)

                    originIDs.add(mt.derivedOriginID())
                    magIDs.add(mt.momentMagnitudeID())

                    if self._hideAuthor:
                        self._removeAuthor(mt)

                    # BUG FIX: the comments of the current moment tensor were
                    # loaded once per moment tensor of the parent focal
                    # mechanism (an inner loop shadowing iMT), inflating
                    # objCount and issuing redundant database queries. Load
                    # them exactly once.
                    if ro.comments:
                        objCount += self._loadComments(dbq, mt)

                    objCount += dbq.loadDataUseds(mt)
                    objCount += dbq.loadMomentTensorPhaseSettings(mt)
                    if ro.staMTs:
                        objCount += dbq.loadMomentTensorStationContributions(
                            mt)
                        for iStaMT in range(
                                mt.momentTensorStationContributionCount()):
                            objCount += dbq.load(
                                mt.momentTensorStationContribution(iStaMT))

                    if not self.checkObjects(req, objCount, maxObj):
                        return False

            # find ID of origin containing preferred Magnitude
            if e.preferredMagnitudeID():
                obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
                                    e.preferredMagnitudeID())
                m = DataModel.Magnitude.Cast(obj)
                if m is not None:
                    oID = dbq.parentPublicID(m)
                    if oID:
                        originIDs.add(oID)

            # origins
            for oID in sorted(originIDs):
                if req._disconnected:
                    return False
                obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
                o = DataModel.Origin.Cast(obj)
                if o is None:
                    continue

                ep.add(o)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(o)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, o)
                if not self.checkObjects(req, objCount, maxObj):
                    return False

                # magnitudes
                dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
                for obj in dbIter:
                    mag = DataModel.Magnitude.Cast(obj)
                    if mag is None:
                        continue
                    if ro.allMags:
                        o.add(mag)
                    elif mag.publicID() in magIDs:
                        o.add(mag)
                        dbIter.close()

                    if self._hideAuthor:
                        self._removeAuthor(mag)

                objCount += o.magnitudeCount()
                if ro.comments:
                    for iMag in range(o.magnitudeCount()):
                        objCount += self._loadComments(dbq, o.magnitude(iMag))
                if not self.checkObjects(req, objCount, maxObj):
                    return False

                # TODO station magnitudes, amplitudes
                # - added pick id for each pick referenced by amplitude

                # arrivals
                if ro.arrivals:
                    objCount += dbq.loadArrivals(o)
                    # BUG FIX: previously tested "if self._removeAuthor:",
                    # which is a bound method and therefore always truthy —
                    # authors were stripped from arrivals even when
                    # _hideAuthor is False. Test the flag like everywhere
                    # else in this method.
                    if self._hideAuthor:
                        for iArrival in range(o.arrivalCount()):
                            self._removeAuthor(o.arrival(iArrival))

                    # collect pick IDs if requested
                    if ro.picks:
                        for iArrival in range(o.arrivalCount()):
                            pickIDs.add(o.arrival(iArrival).pickID())

                if not self.checkObjects(req, objCount, maxObj):
                    return False

        # picks
        if pickIDs:
            objCount += len(pickIDs)
            if not self.checkObjects(req, objCount, maxObj):
                return False

            for pickID in sorted(pickIDs):
                obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
                pick = DataModel.Pick.Cast(obj)
                if pick is not None:
                    if self._hideAuthor:
                        self._removeAuthor(pick)
                    if ro.comments:
                        objCount += self._loadComments(dbq, pick)
                    ep.add(pick)
                if not self.checkObjects(req, objCount, maxObj):
                    return False

        # write response
        sink = utils.Sink(req)
        if not exp.write(sink, ep):
            return False
        Logging.debug("%s: returned %i events and %i origins (total "
                      "objects/chars: %i/%i)" %
                      (ro.service, ep.eventCount(), ep.originCount(), objCount,
                       sink.written))
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
Пример #8
0
class FDSNEvent(resource.Resource):
	"""Twisted web resource implementing the fdsnws-event endpoint.

	Parses the request options, sets up the exporter and database
	query and defers the actual processing to a worker thread.
	"""
	isLeaf = True

	#---------------------------------------------------------------------------
	def __init__(self, hideAuthor = False, evaluationMode = None,
	             eventTypeWhitelist = None, eventTypeBlacklist = None,
	             formatList = None):
		# hideAuthor:          strip author information from returned objects
		# evaluationMode:      restrict origins to this evaluation mode
		# eventTypeWhitelist:  if set, only these event types are returned
		# eventTypeBlacklist:  if set, these event types are excluded
		# formatList:          if set, restricts the allowed output formats
		self._hideAuthor = hideAuthor
		self._evaluationMode = evaluationMode
		self._eventTypeWhitelist = eventTypeWhitelist
		self._eventTypeBlacklist = eventTypeBlacklist
		self._formatList = formatList


	#---------------------------------------------------------------------------
	def render_OPTIONS(self, req):
		# Answer CORS preflight requests: allow cross-origin GET/POST
		req.setHeader('Access-Control-Allow-Origin', '*')
		req.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
		req.setHeader('Access-Control-Allow-Headers',
		              'Accept, Content-Type, X-Requested-With, Origin')
		req.setHeader('Content-Type', 'text/plain')
		return ""


	#---------------------------------------------------------------------------
	def render_GET(self, req):
		"""Handle a GET request asynchronously.

		Returns server.NOT_DONE_YET on success; the response is
		produced by _processRequest in a separate thread. Returns an
		error page for invalid or unsupported request options.
		"""
		# Parse and validate GET parameters
		ro = _EventRequestOptions(req.args)
		try:
			ro.parse()
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

		# Catalog filter is not supported
		if ro.catalogs:
			msg = "catalog filter not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# updateafter not implemented
		if ro.updatedAfter:
			msg = "filtering based on update time not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if self._formatList is not None and ro.format not in self._formatList:
			msg = "output format '%s' not available" % ro.format
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# Exporter, 'None' is used for text output
		if ro.format in ro.VText:
			exp = None
		else:
			exp = Exporter.Create(ro.Exporters[ro.format])
			if exp:
				exp.setFormattedOutput(bool(ro.formatted))
			else:
				msg = "output format '%s' not available, export module '%s' could " \
				      "not be loaded." % (ro.format, ro.Exporters[ro.format])
				return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# Create database query
		db = DatabaseInterface.Open(Application.Instance().databaseURI())
		if db is None:
			# BUG fix: the original referenced 'dbq.errorMsg()' here, but
			# 'dbq' is only assigned below — that raised a NameError
			# whenever the database connection failed.
			msg = "could not connect to database"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		dbq = DataModel.DatabaseQuery(db)

		# Process request in separate thread
		d = deferToThread(self._processRequest, req, ro, dbq, exp)
		req.notifyFinish().addErrback(utils.onCancel, d)
		d.addBoth(utils.onFinish, req)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Пример #9
0
    def _processRequest(self, req, ro):
        """Stream the requested waveform data to the client.

        Rejects unsupported request options, enforces the configured
        per-request sample limit, optionally records the request in the
        tracking database, and registers an asynchronous producer that
        writes the miniSEED response.

        Returns server.NOT_DONE_YET on success (the producer finishes
        the response) or an error page on validation failure.
        """

        if ro.quality != 'B' and ro.quality != 'M':
            msg = "quality other than 'B' or 'M' not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        # optional limit on the total number of samples per request
        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
            samples = 0

        # NOTE: removed a duplicate 'app = Application.Instance()' that
        # followed here in the original.
        if app._trackdbEnabled:
            userid = ro.userName or app._trackdbDefaultUser
            # request id: milliseconds elapsed since 2015-01-01 UTC
            # (1420070400000 ms is that epoch offset)
            reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                # first entry of X-Forwarded-For is the originating client
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                       "WAVEFORM", userid,
                                       "REQUEST WAVEFORM " + reqid, "fdsnws",
                                       userIP, req.getClientIP())

        else:
            tracker = None

        # Open record stream
        rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

        # Add request streams
        # iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                for sta in self._stationIter(net, s):
                    for loc in self._locationIter(sta, s):
                        for cha in self._streamIter(loc, s):
                            # clip the requested time window to the
                            # channel epoch where the epoch is defined
                            try:
                                start_time = max(cha.start(), s.time.start)

                            except Exception:
                                start_time = s.time.start

                            try:
                                end_time = min(cha.end(), s.time.end)

                            except Exception:
                                end_time = s.time.end

                            # skip restricted channels the user may not access
                            if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                                continue

                            # enforce maximum sample per request restriction
                            if maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    msg = "skipping stream without sampling " \
                                          "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                    Logging.warning(msg)
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = "maximum number of %sM samples " \
                                          "exceeded" % str(app._samplesM)
                                    return HTTP.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE,
                                        msg, ro)

                            Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                          % (net.code(), sta.code(), loc.code(),
                                             cha.code(), start_time.iso(),
                                             end_time.iso()))
                            rs.addStream(net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time, end_time,
                                         utils.isRestricted(cha),
                                         sta.archiveNetworkCode())

        # Build output filename
        fileName = Application.Instance()._fileNamePrefix.replace(
            "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, tracker)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Пример #10
0
    def _processRequestExp(self, req, ro, dbq, exp, ep):
        """Load the requested event hierarchy into 'ep' and export it.

        Adds event descriptions, comments, origin references, origins,
        magnitudes, arrivals and picks (subject to the request options
        'ro') while enforcing the configured object count limit, then
        writes the result through exporter 'exp'.

        Returns True on success, False if the object limit was hit, the
        client disconnected or the export failed.
        """
        objCount = ep.eventCount()
        maxObj = Application.Instance()._queryObjects

        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        pickIDs = set()
        if ro.picks is None:
            ro.picks = True

        # add related information
        for iEvent in xrange(ep.eventCount()):
            if req._disconnected:
                return False
            e = ep.event(iEvent)
            if self._hideAuthor:
                self._removeAuthor(e)

            # eventDescriptions and comments
            objCount += dbq.loadEventDescriptions(e)
            if ro.comments:
                objCount += self._loadComments(dbq, e)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # origin references: either all or preferred only
            dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
            for obj in dbIter:
                oRef = DataModel.OriginReference.Cast(obj)
                if oRef is None:
                    continue
                if ro.allOrigins:
                    e.add(oRef)
                elif oRef.originID() == e.preferredOriginID():
                    e.add(oRef)
                    dbIter.close()
                # TODO: if focal mechanisms are added make sure derived
                # origin is loaded

            objCount += e.originReferenceCount()

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO: add focal mechanisms

            # origins
            for iORef in xrange(e.originReferenceCount()):
                if req._disconnected:
                    return False
                oID = e.originReference(iORef).originID()
                obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
                o = DataModel.Origin.Cast(obj)
                if o is None:
                    continue

                ep.add(o)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(o)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, o)
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # magnitudes: either all or the preferred one only
                dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
                for obj in dbIter:
                    mag = DataModel.Magnitude.Cast(obj)
                    if mag is None:
                        continue
                    if ro.allMags:
                        o.add(mag)
                    elif mag.publicID() == e.preferredMagnitudeID():
                        o.add(mag)
                        dbIter.close()

                    if self._hideAuthor:
                        self._removeAuthor(mag)

                objCount += o.magnitudeCount()
                if ro.comments:
                    for iMag in xrange(o.magnitudeCount()):
                        objCount += self._loadComments(dbq, o.magnitude(iMag))
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # TODO station magnitudes, amplitudes
                # - added pick id for each pick referenced by amplitude

                # arrivals
                if ro.arrivals:
                    objCount += dbq.loadArrivals(o)
                    # BUG fix: the original tested the (always truthy)
                    # bound method 'self._removeAuthor' instead of the
                    # 'self._hideAuthor' option, stripping authors even
                    # when not requested
                    if self._hideAuthor:
                        for iArrival in xrange(o.arrivalCount()):
                            self._removeAuthor(o.arrival(iArrival))

                    # collect pick IDs if requested
                    if ro.picks:
                        for iArrival in xrange(o.arrivalCount()):
                            pickIDs.add(o.arrival(iArrival).pickID())

                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # picks
        if pickIDs:
            objCount += len(pickIDs)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # sorted for deterministic response ordering (set iteration
            # order is arbitrary)
            for pickID in sorted(pickIDs):
                obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
                pick = DataModel.Pick.Cast(obj)
                if pick is not None:
                    if self._hideAuthor:
                        self._removeAuthor(pick)
                    if ro.comments:
                        objCount += self._loadComments(dbq, pick)
                    ep.add(pick)
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # write response
        sink = utils.Sink(req)
        if not exp.write(sink, ep):
            return False
        Logging.debug("%s: returned %i events and %i origins (total " \
                       "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                       ep.originCount(), objCount, sink.written))
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
Пример #11
0
 def __init__(self, inv, access=None, user=None):
     """Keep the inventory, optional access control and user context.

     The record stream URL is taken from the running application.
     """
     resource.Resource.__init__(self)
     self.__inv = inv
     self.__access = access
     self.__user = user
     self._rsURL = Application.Instance().recordStreamURL()
Пример #12
0
def accessLog(req, ro, code, length, err):
    """Record one request in the application's access log, if enabled."""
    logger = Application.Instance()._accessLog
    if logger is not None:
        logger.log(AccessLogEntry(req, ro, code, length, err))
Пример #13
0
    def _processRequest(self, req, ro):
        """Stream the requested waveform data to the client.

        Rejects unsupported request options, opens the record stream,
        adds all matching non-restricted streams while enforcing the
        configured per-request sample limit, and registers an
        asynchronous producer that writes the miniSEED response.

        Returns server.NOT_DONE_YET on success (the producer finishes
        the response) or an error page on validation failure.
        """

        if ro.quality != 'B' and ro.quality != 'M':
            msg = "quality other than 'B' or 'M' not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        # Open record stream
        rs = RecordStream.Open(self._rsURL)
        if rs is None:
            msg = "could not open record stream"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        # optional limit on the total number of samples per request
        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
            samples = 0

        # Add request streams
        # iterate over inventory networks
        inv = Application.Instance()._inv
        for s in ro.streams:
            for net in self._networkIter(inv, s):
                if ro.userName is None and utils.isRestricted(net):
                    continue
                for sta in self._stationIter(net, s):
                    if ro.userName is None and utils.isRestricted(sta):
                        continue
                    for loc in self._locationIter(sta, s):
                        for cha in self._streamIter(loc, s):
                            # enforce maximum sample per request restriction
                            if maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                # BUG fix: the original caught the
                                # non-existent 'ValueException', which
                                # itself raised a NameError; ValueError is
                                # what the sibling implementation catches
                                except ValueError:
                                    msg = "skipping stream without sampling " \
                                          "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                    Logging.warning(msg)
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (s.time.end - s.time.start).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = "maximum number of %sM samples " \
                                          "exceeded" % str(app._samplesM)
                                    return HTTP.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE,
                                        msg, ro)

                            Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                          % (net.code(), sta.code(), loc.code(),
                                             cha.code(), s.time.start.iso(),
                                             s.time.end.iso()))
                            rs.addStream(net.code(), sta.code(), loc.code(),
                                         cha.code(), s.time.start, s.time.end)

        # Build output filename
        fileName = Application.Instance()._fileNamePrefix + '.mseed'

        # Create producer for async IO
        req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Пример #14
0
 def __init__(self, userName=None):
     """Remember the optional user name and the record stream URL."""
     resource.Resource.__init__(self)
     self.userName = userName
     self._rsURL = Application.Instance().recordStreamURL()
Пример #15
0
def accessLog(req, ro, code, length, err):
    """Record one request in the application's access log, if enabled."""
    logger = Application.Instance()._accessLog  # pylint: disable=W0212
    if logger is not None:
        logger.log(AccessLogEntry(req, ro, code, length, err))