Example #1
    def __init__(self, inv, bufferSize, access=None, user=None):
        BaseResource.__init__(self, VERSION)
        self._rsURL = Application.Instance().recordStreamURL()
        self.__inv = inv
        self.__access = access
        self.__user = user
        self.__bufferSize = bufferSize
Example #2
    def _prepareRequest(self, req, ro):

        dac = Application.Instance().getDACache()

        if ro.format == ro.VFormatJSON:
            contentType = 'application/json'
            extension = 'json'
        elif ro.format == ro.VFormatGeoCSV:
            contentType = 'text/csv'
            extension = 'csv'
        else:
            contentType = 'text/plain'
            extension = 'txt'

        req.setHeader('Content-Type', contentType)
        req.setHeader(
            'Content-Disposition',
            'inline; filename=fdsnws-ext-availability_{0}.{1}'.format(
                Time.GMT().iso(), extension))

        d = deferToThread(self._processRequest, req, ro, dac)

        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
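
The deferToThread/NOT_DONE_YET combination above is the standard Twisted recipe for keeping blocking work off the reactor thread. A minimal, self-contained sketch of the same pattern, with the fdsnws helpers (utils.onCancel, utils.onFinish) replaced by hypothetical inline equivalents:

import time

from twisted.internet.threads import deferToThread
from twisted.python import failure
from twisted.web import server
from twisted.web.resource import Resource


class SlowResource(Resource):
    isLeaf = True

    def render_GET(self, req):
        # run the blocking work in the reactor's thread pool
        d = deferToThread(self._blockingWork)

        # if the client disconnects first, cancel the pending Deferred
        req.notifyFinish().addErrback(lambda _: d.cancel())

        def finish(result):
            if isinstance(result, failure.Failure):
                return  # cancelled or failed; the connection is gone
            req.write(result.encode())
            req.finish()

        d.addBoth(finish)

        # tell Twisted the response is completed asynchronously
        return server.NOT_DONE_YET

    def _blockingWork(self):
        time.sleep(2)  # placeholder for a long-running database query
        return "done"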
Example #3
    def render_GET(self, req):
        # Parse and validate GET parameters
        ro = _EventRequestOptions()
        try:
            ro.parseGET(req.args)
            ro.parse()
        except ValueError as e:
            seiscomp.logging.warning(str(e))
            return self.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

        # Catalog filter is not supported
        if ro.catalogs:
            msg = "catalog filter not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.comments and self._hideComments:
            msg = "including of comments not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # updatedafter not implemented
        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if self._formatList is not None and ro.format not in self._formatList:
            msg = "output format '%s' not available" % ro.format
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            exp = None
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp:
                exp.setFormattedOutput(bool(ro.formatted))
            else:
                msg = "output format '%s' not available, export module '%s' could " \
                      "not be loaded." % (ro.format, ro.Exporters[ro.format])
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # Create database query
        db = DatabaseInterface.Open(Application.Instance().databaseURI())
        if db is None:
            msg = "could not connect to database"
            return self.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        dbq = seiscomp.datamodel.DatabaseQuery(db)

        # Process request in separate thread
        d = deferToThread(self._processRequest, req, ro, dbq, exp)
        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
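
The handler above expects the request-options object to reject malformed input by raising ValueError, which render_GET converts into an HTTP 400 response. A minimal sketch of that contract, using a hypothetical RequestOptions class:

class RequestOptions:
    SUPPORTED_FORMATS = ('xml', 'text')

    def __init__(self):
        self.format = 'xml'

    def parseGET(self, args):
        # args is Twisted's req.args: a dict mapping bytes to lists of bytes
        if b'format' in args:
            self.format = args[b'format'][0].decode('ascii', 'replace')

    def parse(self):
        # raise ValueError on invalid values; the caller renders a 400
        if self.format not in self.SUPPORTED_FORMATS:
            raise ValueError("invalid format: %s" % self.format)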
Example #4
    def _prepareRequest(self, req, ro):
        if ro.availability and not self._daEnabled:
            msg = "including of availability information not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.updatedAfter:
            msg = "filtering based on update time not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.matchTimeSeries and not self._daEnabled:
            msg = "filtering based on available time series not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        # load data availability if requested
        dac = None
        if ro.availability or ro.matchTimeSeries:
            dac = Application.Instance().getDACache()
            if dac is None or len(dac.extents()) == 0:
                msg = "no data availabiltiy extent information found"
                return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # Exporter, 'None' is used for text output
        if ro.format in ro.VText:
            if ro.includeRes:
                msg = "response level output not available in text format"
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
            req.setHeader('Content-Type', 'text/plain')
            d = deferToThread(self._processRequestText, req, ro, dac)
        else:
            exp = Exporter.Create(ro.Exporters[ro.format])
            if exp is None:
                msg = "output format '%s' no available, export module '%s' " \
                      "could not be loaded." % (
                          ro.format, ro.Exporters[ro.format])
                return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

            req.setHeader('Content-Type', 'application/xml')
            exp.setFormattedOutput(bool(ro.formatted))
            d = deferToThread(self._processRequestExp, req, ro, exp, dac)

        req.notifyFinish().addErrback(utils.onCancel, d)
        d.addBoth(utils.onFinish, req)

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
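
The branch above pairs each output family with a Content-Type header and a worker function. The same dispatch condensed into a sketch; textHandler and xmlHandler stand in for _processRequestText and _processRequestExp, and the Exporter import path is an assumption:

from twisted.internet.threads import deferToThread
from seiscomp.io import Exporter  # assumed import path

def dispatchFormat(req, ro, dac, textHandler, xmlHandler):
    if ro.format in ro.VText:
        req.setHeader('Content-Type', 'text/plain')
        return deferToThread(textHandler, req, ro, dac)

    exp = Exporter.Create(ro.Exporters[ro.format])
    if exp is None:
        raise ValueError("export module '%s' could not be loaded"
                         % ro.Exporters[ro.format])
    exp.setFormattedOutput(bool(ro.formatted))
    req.setHeader('Content-Type', 'application/xml')
    return deferToThread(xmlHandler, req, ro, exp, dac)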
Example #5
    def _findEvents(self, ep, ro, dbq):
        db = Application.Instance().database()

        def _T(name):
            return db.convertColumnName(name)

        def _time(time):
            return db.timeToString(time)

        orderByMag = ro.orderBy and ro.orderBy.startswith('magnitude')
        reqMag = ro.mag or orderByMag
        reqMagType = ro.mag and ro.mag.type
        reqDist = ro.geo and ro.geo.bCircle
        colPID = _T('publicID')
        colTime = _T('time_value')
        colMag = _T('magnitude_value')
        if orderByMag:
            colOrderBy = "m.%s" % colMag
        else:
            colOrderBy = "o.%s" % colTime

        bBox = None
        if ro.geo:
            colLat, colLon = _T('latitude_value'), _T('longitude_value')
            if ro.geo.bBox:
                bBox = ro.geo.bBox
            else:
                bBox = ro.geo.bCircle.calculateBBox()

        # SELECT --------------------------------
        q = "SELECT DISTINCT pe.%s AS %s, e.*, %s AS colOrderBy" % (
            colPID, colPID, colOrderBy)
        if reqDist:  # great circle distance via the spherical law of cosines
            c = ro.geo.bCircle
            q += ", DEGREES(ACOS(" \
                 "COS(RADIANS(o.%s)) * COS(RADIANS(%s)) * " \
                 "COS(RADIANS(o.%s) - RADIANS(%s)) + SIN(RADIANS(o.%s)) * " \
                 "SIN(RADIANS(%s)))) AS distance" % (
                     colLat, c.lat, colLon, c.lon, colLat, c.lat)

        # FROM ----------------------------------
        q += " FROM Event AS e, PublicObject AS pe" \
             ", Origin AS o, PublicObject AS po"
        if reqMag:
            q += ", Magnitude AS m"
            if not reqMagType:
                # the preferred magnitude is used if no specific magnitude
                # type is requested
                q += ", PublicObject AS pm"

        # WHERE ---------------------------------
        q += " WHERE e._oid = pe._oid"

        # event type white list filter, defined via configuration and/or request
        # parameters
        types = None
        if self._eventTypeWhitelist and ro.eventTypes:
            types = self._eventTypeWhitelist.intersection(ro.eventTypes)
            if not types:
                seiscomp.logging.debug(
                    'all requested event types filtered by configured event '
                    'type white list')
                return
        elif self._eventTypeWhitelist:
            types = self._eventTypeWhitelist
        elif ro.eventTypes:
            types = ro.eventTypes
        if types is not None:
            allowNull = -1 in types
            types = [x for x in types if x >= 0]

            etqIn = "e.%s IN ('%s')" % (_T('type'), "', '".join(
                seiscomp.datamodel.EEventTypeNames.name(x) for x in types))
            if allowNull:
                etqNull = "e.%s is NULL" % _T('type')
                if types:
                    q += " AND (%s OR %s)" % (etqNull, etqIn)
                else:
                    q += " AND %s" % etqNull
            else:
                q += " AND %s" % etqIn

        # event type black list filter, defined in configuration
        if self._eventTypeBlacklist:
            allowNull = -1 not in self._eventTypeBlacklist
            types = [x for x in self._eventTypeBlacklist if x >= 0]

            etqNotIn = "e.%s NOT IN ('%s')" % (_T('type'), "', '".join(
                seiscomp.datamodel.EEventTypeNames.name(x) for x in types))
            if allowNull:
                etqNull = "e.%s is NULL" % _T('type')
                if types:
                    q += " AND (%s OR %s)" % (etqNull, etqNotIn)
                else:
                    q += " AND %s" % etqNull
            else:
                q += " AND %s" % etqNotIn

        # event agency id filter
        if ro.contributors:
            q += " AND e.%s AND upper(e.%s) IN('%s')" % (
                _T('creationinfo_used'), _T('creationinfo_agencyid'),
                "', '".join(ro.contributors).upper())

        # origin information filter
        q += " AND o._oid = po._oid AND po.%s = e.%s" % (
            colPID, _T('preferredOriginID'))

        # evaluation mode config parameter
        if self._evaluationMode is not None:
            colEvalMode = _T('evaluationMode')
            q += " AND o.%s = '%s'" % (
                colEvalMode,
                seiscomp.datamodel.EEvaluationModeNames.name(
                    self._evaluationMode))

        # time
        if ro.time:
            colTimeMS = _T('time_value_ms')
            if ro.time.start is not None:
                t = _time(ro.time.start)
                ms = ro.time.start.microseconds()
                q += " AND (o.%s > '%s' OR (o.%s = '%s' AND o.%s >= %i))" % (
                    colTime, t, colTime, t, colTimeMS, ms)
            if ro.time.end is not None:
                t = _time(ro.time.end)
                ms = ro.time.end.microseconds()
                q += " AND (o.%s < '%s' OR (o.%s = '%s' AND o.%s <= %i))" % (
                    colTime, t, colTime, t, colTimeMS, ms)

        # bounding box
        if bBox:
            if bBox.minLat is not None:
                q += " AND o.%s >= %s" % (colLat, bBox.minLat)
            if bBox.maxLat is not None:
                q += " AND o.%s <= %s" % (colLat, bBox.maxLat)
            if bBox.dateLineCrossing():
                q += " AND (o.%s >= %s OR o.%s <= %s)" % (colLon, bBox.minLon,
                                                          colLon, bBox.maxLon)
            else:
                if bBox.minLon is not None:
                    q += " AND o.%s >= %s" % (colLon, bBox.minLon)
                if bBox.maxLon is not None:
                    q += " AND o.%s <= %s" % (colLon, bBox.maxLon)

        # depth
        if ro.depth:
            q += " AND o.%s" % _T("depth_used")
            colDepth = _T('depth_value')
            if ro.depth.min is not None:
                q += " AND o.%s >= %s" % (colDepth, ro.depth.min)
            if ro.depth.max is not None:
                q += " AND o.%s <= %s" % (colDepth, ro.depth.max)

        # updated after
        if ro.updatedAfter:
            t = _time(ro.updatedAfter)
            ms = ro.updatedAfter.microseconds()
            colCTime = _T('creationinfo_creationtime')
            colCTimeMS = _T('creationinfo_creationtime_ms')
            colMTime = _T('creationinfo_modificationtime')
            colMTimeMS = _T('creationinfo_modificationtime_ms')
            tFilter = "(o.%s > '%s' OR (o.%s = '%s' AND o.%s > %i))"

            q += " AND ("
            q += tFilter % (colCTime, t, colCTime, t, colCTimeMS, ms) + " OR "
            q += tFilter % (colMTime, t, colMTime, t, colMTimeMS, ms) + ")"

        # magnitude information filter
        if reqMag:
            if ro.mag and ro.mag.min is not None:
                q += " AND m.%s >= %s" % (colMag, ro.mag.min)
            if ro.mag and ro.mag.max is not None:
                q += " AND m.%s <= %s" % (colMag, ro.mag.max)

            # default case, no magnitude type filter:
            # join magnitude table on preferred magnitude id of event
            if not reqMagType:
                q += " AND m._oid = pm._oid AND pm.%s = e.%s" % (
                    colPID, _T('preferredMagnitudeID'))

            # magnitude type filter:
            # Specific mag type is searched in magnitudes of preferred origin or
            # in derived origin of moment tensors of preferred focal mechanism.
            else:
                q += " AND m.%s = '%s' AND m._parent_oid " % (
                    _T('type'), dbq.toString(ro.mag.type))

                # For performance reasons the query is split in two parts
                # combined with a UNION statement. The subsequent ORDER BY,
                # LIMIT/OFFSET or distance subquery is carried out on the entire
                # UNION result set.
                q += "= po._oid UNION " + q + "IN (" \
                         "SELECT pdo._oid " \
                         "FROM " \
                             "PublicObject pfm, " \
                             "MomentTensor mt, " \
                             "PublicObject pdo " \
                         "WHERE " \
                             "pfm.%s = e.%s AND " \
                             "mt._parent_oid = pfm._oid AND " \
                             "pdo.%s = mt.%s)" % (
                                 colPID, _T('preferredFocalMechanismID'),
                                 colPID, _T('derivedOriginID'))

        # ORDER BY ------------------------------
        q += " ORDER BY colOrderBy "
        if ro.orderBy and ro.orderBy.endswith('-asc'):
            q += "ASC"
        else:
            q += "DESC"

        # SUBQUERY distance (optional) ----------
        if reqDist:
            q = "SELECT * FROM (%s) AS subquery WHERE distance " % q
            c = ro.geo.bCircle
            if c.minRad is not None:
                q += ">= %s" % c.minRad
            if c.maxRad is not None:
                if c.minRad is not None:
                    q += " AND distance "
                q += "<= %s" % c.maxRad

        # LIMIT/OFFSET --------------------------
        if ro.limit is not None or ro.offset is not None:
            # Postgres allows omitting the LIMIT parameter when an OFFSET is
            # given, MySQL does not. According to the MySQL manual a very
            # large number should be used in this case.
            l = DBMaxUInt
            if ro.limit is not None:
                l = ro.limit
            q += " LIMIT %i" % l
            if ro.offset is not None:
                q += " OFFSET %i" % ro.offset

        seiscomp.logging.debug("event query: %s" % q)

        for e in dbq.getObjectIterator(q, seiscomp.datamodel.Event.TypeInfo()):
            ep.add(seiscomp.datamodel.Event.Cast(e))
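
For reference, the distance term added to the SELECT clause evaluates the spherical law of cosines. The same computation in plain Python (a sketch, math only):

import math

def distanceDeg(lat1, lon1, lat2, lon2):
    # central angle in degrees, matching
    # DEGREES(ACOS(COS(a)COS(b)COS(dlon) + SIN(a)SIN(b))) in the query above
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dlon = math.radians(lon1 - lon2)
    cosCentral = (math.cos(phi1) * math.cos(phi2) * math.cos(dlon)
                  + math.sin(phi1) * math.sin(phi2))
    # clamp against floating point rounding before taking the arc cosine
    cosCentral = max(-1.0, min(1.0, cosCentral))
    return math.degrees(math.acos(cosCentral))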
Example #6
    def _processRequestExp(self, req, ro, dbq, exp, ep):
        objCount = ep.eventCount()
        maxObj = Application.Instance()._queryObjects  #pylint: disable=W0212

        if not self.checkObjects(req, objCount, maxObj):
            return False

        pickIDs = set()
        if ro.picks is None:
            ro.picks = True

        # add related information
        for iEvent in range(ep.eventCount()):
            if req._disconnected:  #pylint: disable=W0212
                return False
            e = ep.event(iEvent)
            if self._hideAuthor:
                self._removeAuthor(e)

            originIDs = set()
            magIDs = set()
            magIDs.add(e.preferredMagnitudeID())

            # eventDescriptions and comments
            objCount += dbq.loadEventDescriptions(e)
            if ro.comments:
                objCount += self._loadComments(dbq, e)
            if not self.checkObjects(req, objCount, maxObj):
                return False

            # origin references: either all or preferred only
            dbIter = dbq.getObjects(
                e, seiscomp.datamodel.OriginReference.TypeInfo())
            for obj in dbIter:
                oRef = seiscomp.datamodel.OriginReference.Cast(obj)
                if oRef is None:
                    continue
                if ro.allOrigins:
                    e.add(oRef)
                    originIDs.add(oRef.originID())
                elif oRef.originID() == e.preferredOriginID():
                    e.add(oRef)
                    originIDs.add(oRef.originID())
                    dbIter.close()

            objCount += e.originReferenceCount()

            # focalMechanism references: either none, preferred only or all
            if ro.fm or ro.allFMs:
                dbIter = dbq.getObjects(
                    e, seiscomp.datamodel.FocalMechanismReference.TypeInfo())
                for obj in dbIter:
                    fmRef = seiscomp.datamodel.FocalMechanismReference.Cast(
                        obj)
                    if fmRef is None:
                        continue
                    if ro.allFMs:
                        e.add(fmRef)
                    elif fmRef.focalMechanismID(
                    ) == e.preferredFocalMechanismID():
                        e.add(fmRef)
                        dbIter.close()

            objCount += e.focalMechanismReferenceCount()

            if not self.checkObjects(req, objCount, maxObj):
                return False

            # focal mechanisms: process before origins to add derived origin to
            # originID list since it may be missing from origin reference list
            for iFMRef in range(e.focalMechanismReferenceCount()):
                if req._disconnected:  #pylint: disable=W0212
                    return False
                fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
                obj = dbq.getObject(
                    seiscomp.datamodel.FocalMechanism.TypeInfo(), fmID)
                fm = seiscomp.datamodel.FocalMechanism.Cast(obj)
                if fm is None:
                    continue

                ep.add(fm)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(fm)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, fm)

                # momentTensors
                objCount += dbq.loadMomentTensors(fm)

                if not self.checkObjects(req, objCount, maxObj):
                    return False

                for iMT in range(fm.momentTensorCount()):
                    mt = fm.momentTensor(iMT)

                    originIDs.add(mt.derivedOriginID())
                    magIDs.add(mt.momentMagnitudeID())

                    if self._hideAuthor:
                        self._removeAuthor(mt)

                    if ro.comments:
                        objCount += self._loadComments(dbq, mt)

                    objCount += dbq.loadDataUseds(mt)
                    objCount += dbq.loadMomentTensorPhaseSettings(mt)
                    if ro.staMTs:
                        objCount += dbq.loadMomentTensorStationContributions(
                            mt)
                        for iStaMT in range(
                                mt.momentTensorStationContributionCount()):
                            objCount += dbq.load(
                                mt.momentTensorStationContribution(iStaMT))

                    if not self.checkObjects(req, objCount, maxObj):
                        return False

            # find ID of origin containing preferred Magnitude
            if e.preferredMagnitudeID():
                obj = dbq.getObject(seiscomp.datamodel.Magnitude.TypeInfo(),
                                    e.preferredMagnitudeID())
                m = seiscomp.datamodel.Magnitude.Cast(obj)
                if m is not None:
                    oID = dbq.parentPublicID(m)
                    if oID:
                        originIDs.add(oID)

            # origins
            for oID in sorted(originIDs):
                if req._disconnected:  #pylint: disable=W0212
                    return False
                obj = dbq.getObject(seiscomp.datamodel.Origin.TypeInfo(), oID)
                o = seiscomp.datamodel.Origin.Cast(obj)
                if o is None:
                    continue

                ep.add(o)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(o)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, o)
                if not self.checkObjects(req, objCount, maxObj):
                    return False

                # magnitudes
                dbIter = dbq.getObjects(
                    oID, seiscomp.datamodel.Magnitude.TypeInfo())
                for obj in dbIter:
                    mag = seiscomp.datamodel.Magnitude.Cast(obj)
                    if mag is None:
                        continue
                    if ro.allMags:
                        o.add(mag)
                    elif mag.publicID() in magIDs:
                        o.add(mag)
                        dbIter.close()

                    if self._hideAuthor:
                        self._removeAuthor(mag)

                objCount += o.magnitudeCount()
                if ro.comments:
                    for iMag in range(o.magnitudeCount()):
                        objCount += self._loadComments(dbq, o.magnitude(iMag))
                if not self.checkObjects(req, objCount, maxObj):
                    return False

                # TODO station magnitudes, amplitudes
                # - added pick id for each pick referenced by amplitude

                # arrivals
                if ro.arrivals:
                    objCount += dbq.loadArrivals(o)
                    if self._hideAuthor:
                        for iArrival in range(o.arrivalCount()):
                            self._removeAuthor(o.arrival(iArrival))

                    # collect pick IDs if requested
                    if ro.picks:
                        for iArrival in range(o.arrivalCount()):
                            pickIDs.add(o.arrival(iArrival).pickID())

                if not self.checkObjects(req, objCount, maxObj):
                    return False

        # picks
        if pickIDs:
            objCount += len(pickIDs)
            if not self.checkObjects(req, objCount, maxObj):
                return False

            for pickID in sorted(pickIDs):
                obj = dbq.getObject(seiscomp.datamodel.Pick.TypeInfo(), pickID)
                pick = seiscomp.datamodel.Pick.Cast(obj)
                if pick is not None:
                    if self._hideAuthor:
                        self._removeAuthor(pick)
                    if ro.comments:
                        objCount += self._loadComments(dbq, pick)
                    ep.add(pick)
                if not self.checkObjects(req, objCount, maxObj):
                    return False

        # write response
        sink = utils.Sink(req)
        if not exp.write(sink, ep):
            return False
        seiscomp.logging.debug("%s: returned %i events and %i origins " \
            "(total objects/chars: %i/%i)" % (
                ro.service, ep.eventCount(), ep.originCount(), objCount, sink.written))
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
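
A recurring idiom in this example is to iterate a database cursor, down-cast each generic object, and close the iterator early once the wanted child has been found. Isolated as a sketch (function and parameter names are illustrative):

import seiscomp.datamodel

def findMagnitude(dbq, parent, wantedID):
    dbIter = dbq.getObjects(parent, seiscomp.datamodel.Magnitude.TypeInfo())
    for obj in dbIter:
        mag = seiscomp.datamodel.Magnitude.Cast(obj)
        if mag is None:
            continue  # row could not be cast to a Magnitude
        if mag.publicID() == wantedID:
            dbIter.close()  # abort the underlying database query early
            return mag
    return None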
Example #7
    def _processRequest(self, req, ro):
        #pylint: disable=W0212

        if ro.quality != 'B' and ro.quality != 'M':
            msg = "quality other than 'B' or 'M' not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return self.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
            samples = 0

        trackerList = []

        if app._trackdbEnabled or app._requestLog:
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            clientID = req.getHeader("User-Agent")
            if clientID:
                clientID = clientID[:80]
            else:
                clientID = "fdsnws"

        if app._trackdbEnabled:
            if ro.userName:
                userID = ro.userName
            else:
                userID = app._trackdbDefaultUser

            reqID = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
            tracker = RequestTrackerDB(clientID, app.connection(), reqID,
                                       "WAVEFORM", userID,
                                       "REQUEST WAVEFORM " + reqID,
                                       "fdsnws", userIP, req.getClientIP())

            trackerList.append(tracker)

        if app._requestLog:
            tracker = app._requestLog.tracker(
                ro.service, ro.userName, userIP, clientID)
            trackerList.append(tracker)

        # Open record stream
        rs = _MyRecordStream(self._rsURL, trackerList, self.__bufferSize)

        forbidden = None
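        # forbidden is tri-state: None means no stream matched the request,
        # True means every matching stream was denied, False means at least
        # one stream was authorized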

        # Add request streams
        # iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                netRestricted = utils.isRestricted(net)
                if not trackerList and netRestricted and not self.__user:
                    forbidden = forbidden or (forbidden is None)
                    continue
                for sta in self._stationIter(net, s):
                    staRestricted = utils.isRestricted(sta)
                    if not trackerList and staRestricted and not self.__user:
                        forbidden = forbidden or (forbidden is None)
                        continue
                    for loc in self._locationIter(sta, s):
                        for cha in self._streamIter(loc, s):
                            start_time = max(cha.start(), s.time.start)

                            try:
                                end_time = min(cha.end(), s.time.end)
                            except ValueError:
                                end_time = s.time.end

                            if (netRestricted or staRestricted or utils.isRestricted(cha)) and (
                                    not self.__user or (
                                        self.__access and not self.__access.authorize(
                                            self.__user, net.code(), sta.code(),
                                            loc.code(), cha.code(),
                                            start_time, end_time))):

                                for tracker in trackerList:
                                    net_class = 't' if net.code()[0] \
                                        in "0123456789XYZ" else 'p'
                                    tracker.line_status(
                                        start_time, end_time, net.code(),
                                        sta.code(), cha.code(), loc.code(),
                                        True, net_class, True, [], "fdsnws",
                                        "DENIED", 0, "")

                                forbidden = forbidden or (forbidden is None)
                                continue

                            forbidden = False

                            # enforce maximum sample per request restriction
                            if maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    msg = "skipping stream without sampling " \
                                          "rate definition: %s.%s.%s.%s" % (
                                              net.code(), sta.code(),
                                              loc.code(), cha.code())
                                    seiscomp.logging.warning(msg)
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = "maximum number of %sM samples " \
                                          "exceeded" % str(app._samplesM)
                                    return self.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro)

                            seiscomp.logging.debug(
                                "adding stream: %s.%s.%s.%s %s - %s" % (
                                    net.code(), sta.code(), loc.code(),
                                    cha.code(), start_time.iso(), end_time.iso()))
                            rs.addStream(
                                net.code(), sta.code(), loc.code(), cha.code(),
                                start_time, end_time, utils.isRestricted(cha),
                                sta.archiveNetworkCode())

        if forbidden:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "DENIED", 0, "")
                tracker.request_status("END", "")

            msg = "access denied"
            return self.renderErrorPage(req, http.FORBIDDEN, msg, ro)

        if forbidden is None:
            for tracker in trackerList:
                tracker.volume_status("fdsnws", "NODATA", 0, "")
                tracker.request_status("END", "")

            msg = "no metadata found"
            return self.renderErrorPage(req, http.NO_CONTENT, msg, ro)

        # Build output filename
        fileName = Application.Instance()._fileNamePrefix.replace(
            "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, trackerList)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
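
The client-IP resolution above prefers the first entry of the X-Forwarded-For header, which a reverse proxy sets, and falls back to the TCP peer address. Isolated as a sketch:

def resolveClientIP(req):
    # first (original) client address from X-Forwarded-For, if present
    xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
    if xff:
        return xff[0].split(",")[0].strip()
    # otherwise the peer address reported by Twisted
    return req.getClientIP()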
Example #8
def accessLog(req, ro, code, length, err):
    logger = Application.Instance()._accessLog # pylint: disable=W0212
    if logger is None:
        return

    logger.log(AccessLogEntry(req, ro, code, length, err))
Example #9
    def _processRequest(self, req, ro, dac):
        if req._disconnected:
            return False

        # tuples: wid, segment, restricted status
        lines = []

        byteCount = 0

        # iterate extents and create IN clauses of parent_oids in bunches
        # of 1000 because the query size is limited
        parentOIDs, idList, tooLarge = [], [], []
        i = 0
        for ext, objID, _ in ro.extentIter(dac, self.user, self.access):
            if req._disconnected:
                return False

            if ro.excludeTooLarge:
                if ext.segmentOverflow():
                    continue
            elif ext.segmentOverflow():
                tooLarge.append(ext)
                continue
            elif tooLarge:
                continue

            if i < 1000:
                idList.append(objID)
                i += 1
            else:
                parentOIDs.append(idList)
                idList = [objID]
                i = 1

        if not ro.excludeTooLarge and tooLarge:
            extents = ', '.join(
                '{0}.{1}.{2}.{3}'.format(e.waveformID().networkCode(),
                                         e.waveformID().stationCode(),
                                         e.waveformID().locationCode(),
                                         e.waveformID().channelCode())
                for e in tooLarge)

            msg = 'Unable to process request due to database limitations. ' \
                  'Some selections have too many segments to process. ' \
                  'Rejected extents: {{{0}}}. This limitation may be ' \
                  'resolved in a future version of this webservice.' \
                  .format(extents)
            self.writeErrorPage(req, http.REQUEST_ENTITY_TOO_LARGE, msg, ro)
            return False

        if len(idList) > 0:
            parentOIDs.append(idList)
        else:
            msg = "no matching availabilty information found"
            self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
            return False

        db = io.DatabaseInterface.Open(Application.Instance().databaseURI())
        if db is None:
            msg = "could not connect to database"
            self.writeErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)
            return False

        lines = self._lineIter(db, parentOIDs, req, ro, dac.extentsOID())

        byteCount, segCount = self._writeLines(req, lines, ro)

        # Return 204 if no matching availability information was found
        if segCount <= 0:
            msg = "no matching availabilty information found"
            self.writeErrorPage(req, http.NO_CONTENT, msg, ro)
            return True

        logging.debug("%s: returned %i segments (total bytes: %i)" %
                      (ro.service, segCount, byteCount))
        utils.accessLog(req, ro, http.OK, byteCount, None)

        return True
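
The 1000-ID batching above keeps each SQL IN clause within database size limits. Factored into a generic helper (a sketch):

def batched(objIDs, size=1000):
    # yield lists of at most `size` IDs for use in SQL IN clauses
    batch = []
    for objID in objIDs:
        batch.append(objID)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch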