Example #1
    def stadates(self,start=False,end=False):
        """
        function to return start and end times for a station

        Get list of valid dates
        """

        if not start: return self.stachan_cache.keys()


        cache = {}

        if not end: end = stock.now()
        if start > end: end = stock.now()
        start = float(start)
        end = float(end)

        for sta in self.stachan_cache:
            for chan in self.stachan_cache[sta]:
                for date in self.stachan_cache[sta][chan]['dates']:

                    if date[1] == -1:

                        if date[0] <= start: cache[sta] = 1
                        if date[0] <= end: cache[sta] = 1

                    else:

                        if date[0] <= start and start <= date[1]: cache[sta] = 1
                        if date[0] <= end and end <= date[1]: cache[sta] = 1
                        if start <= date[0] and date[1] <= end: cache[sta] = 1

        self.logger.info('cache.keys: ' + str(cache.keys()))
        return cache.keys()
Example #2
    def stadates(self, start=False, end=False):
        """
        function to return start and end times for a station

        Get list of valid dates
        """

        if not start: return self.stachan_cache.keys()

        cache = {}

        if not end: end = stock.now()
        if start > end: end = stock.now()
        start = float(start)
        end = float(end)

        for sta in self.stachan_cache:
            for chan in self.stachan_cache[sta]:
                for date in self.stachan_cache[sta][chan]['dates']:

                    if date[1] == -1:

                        if date[0] <= start: cache[sta] = 1
                        if date[0] <= end: cache[sta] = 1

                    else:

                        if date[0] <= start and start <= date[1]:
                            cache[sta] = 1
                        if date[0] <= end and end <= date[1]: cache[sta] = 1
                        if start <= date[0] and date[1] <= end: cache[sta] = 1

        self.logger.info('cache.keys: ' + str(cache.keys()))
        return cache.keys()
Example #3
    def _get_stachan_cache(self):
        """
        private function to load data

        """

        records = 0

        self.logger.info("Stations(): update cache")

        for dbname in self.dbcentral.list():

            self.logger.debug('Station(): dbname: %s' % dbname)

            dates = {}

            query_start_time = time.time()
            try:
                self.logger.debug("Dbopen " + dbname)
                db = datascope.dbopen(dbname, 'r')
                table = 'wfdisc'
                field = 'time'
                self.logger.debug("Dblookup table=%s field=%s" %
                                  (table, field))
                dbwfdisc = db.lookup(table=table, field=field)
                self.logger.debug("Getting record count of " + table)
                records = dbwfdisc.query(datascope.dbRECORD_COUNT)
                self.mintime = dbwfdisc.ex_eval('min(time)')
                self.maxtime = dbwfdisc.ex_eval('max(endtime)')
            except Exception as e:
                self.logger.exception('Problem with wfdisc table. %s: %s' %
                                      (Exception, e))
                sys.exit(reactor.stop())

            elapsed_time = time.time() - query_start_time
            self.logger.debug(
                "Intial dbquery and wfdisc record count took %d seconds" %
                elapsed_time)
            if self.maxtime > stock.now() or self.maxtime > (stock.now() -
                                                             3600):
                self.maxtime = -1

            self.logger.debug("Starting wfdisc processing of %d records" %
                              records)
            prog = ProgressLogger("Stations: processing wfdisc record ",
                                  records,
                                  logger=self.logger)
            for j in range(records):
                prog.tick()
                dbwfdisc.record = j

                try:
                    sta, chan, dbtime = dbwfdisc.getv('sta', 'chan', 'time')
                    self.wfdisc_stachan[sta].add(chan)
                    self.wfdates[stock.yearday(dbtime)] = 1
                except datascope.DatascopeException as e:
                    self.logger.exception('(%s=>%s)' % (Exception, e))
Example #4
    def _get_stachan_cache(self):
        """
        private function to load data

        """

        records = 0

        self.logger.info("Stations(): update cache")

        for dbname in self.dbcentral.list():

            self.logger.debug('Station(): dbname: %s' % dbname)

            dates = {}

            query_start_time = time.time()
            try:
                self.logger.debug("Dbopen "+dbname)
                db = datascope.dbopen( dbname , 'r' )
                table='wfdisc'
                field='time'
                self.logger.debug("Dblookup table=%s field=%s" % (table,field))
                dbwfdisc = db.lookup( table=table,field=field)
                self.logger.debug("Getting record count of "+table)
                records = dbwfdisc.query(datascope.dbRECORD_COUNT)
                self.mintime = dbwfdisc.ex_eval('min(time)')
                self.maxtime   = dbwfdisc.ex_eval('max(endtime)')
            except Exception as e:
                self.logger.exception('Problem with wfdisc table. %s: %s' % (
                    Exception, e))
                sys.exit(reactor.stop())

            elapsed_time = time.time() - query_start_time
            self.logger.debug(
                "Intial dbquery and wfdisc record count took %d seconds" %
                elapsed_time)
            if self.maxtime > stock.now() or self.maxtime > (stock.now()-3600):
                self.maxtime = -1


            self.logger.debug("Starting wfdisc processing of %d records" % records)
            prog=ProgressLogger("Stations: processing wfdisc record ",
                                records, logger=self.logger)
            for j in range(records):
                prog.tick()
                dbwfdisc.record = j

                try:
                    sta,chan,dbtime = dbwfdisc.getv('sta','chan','time')
                    self.wfdisc_stachan[sta].add(chan)
                    self.wfdates[stock.yearday(dbtime)] = 1
                except datascope.DatascopeException as e:
                    self.logger.exception('(%s=>%s)' % (Exception,e))
Example #5
    def _get_chanperf(self):

        self.logging.debug( "_get_chanperf()")

        today = stock.str2epoch( str(stock.yearday( stock.now() )) )
        lastmonth =  today - (86400 * int(self.perf_days_back))

        month = {}
        week = {}

        fields = ['snet','sta','chan','time','perf']
        steps = [ 'dbopen chanperf', 'dbjoin -o snetsta',
                    'dbsubset time >= %s' % lastmonth ]

        if self.perf_subset:
            steps.append('dbsubset %s' % self.perf_subset)

        for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
            snet = v.pop('snet')
            sta = v.pop('sta')
            chan = v.pop('chan')

            fullname = "%s.%s.%s" % ( snet, sta, chan )

            self.logging.debug( "_get_chanperf(%s_%s)" % (snet,sta) )

            if self._verify_cache(snet,sta,'chanperf'):
                try:
                    if len( self.cache[snet][sta]['chanperf'][chan] ) < 1: raise
                except Exception:
                    self.cache[snet][sta]['chanperf'][chan] = {}

                #v['time'] = readable_time( v['time'], '%Y-%m-%d' )
                v['time'] = int( v['time'] )
                self.cache[snet][sta]['chanperf'][chan][ v['time'] ] = v['perf']
Example #6
    def _clean_cache(self, cache):
        """
        Need to reshape the dict
        """
        results = []

        for snet in cache:
            if not snet:
                continue
            for sta in cache[snet]:
                if not sta:
                    continue
                # Stringify the dict. This will avoid losing decimal places
                oldEntry = json.loads(json.dumps(cache[snet][sta]))

                # Generic id for this entry
                oldEntry["id"] = snet + "_" + sta
                oldEntry["dlname"] = snet + "_" + sta

                if not "snet" in oldEntry:
                    oldEntry["snet"] = snet
                if not "sta" in oldEntry:
                    oldEntry["sta"] = sta

                # add entry for autoflush index and IM checks
                oldEntry["lddate"] = datetime.fromtimestamp(stock.now())
                results.append(oldEntry)

        return results
Example #7
    def _get_chanperf(self):

        self.logging.debug("_get_chanperf()")

        today = stock.str2epoch(str(stock.yearday(stock.now())))
        lastmonth = today - (86400 * int(self.perf_days_back))

        month = {}
        week = {}

        fields = ["snet", "sta", "chan", "time", "perf"]
        steps = ["dbopen chanperf", "dbjoin -o snetsta", "dbsubset time >= %s" % lastmonth]

        if self.perf_subset:
            steps.append("dbsubset %s" % self.perf_subset)

        for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
            snet = v.pop("snet")
            sta = v.pop("sta")
            chan = v.pop("chan")

            fullname = "%s.%s.%s" % (snet, sta, chan)

            self.logging.debug("_get_chanperf(%s_%s)" % (snet, sta))

            if self._verify_cache(snet, sta, "chanperf"):
                try:
                    if len(self.cache[snet][sta]["chanperf"][chan]) < 1:
                        raise
                except Exception:
                    self.cache[snet][sta]["chanperf"][chan] = {}

                # v['time'] = readable_time( v['time'], '%Y-%m-%d' )
                v["time"] = int(v["time"])
                self.cache[snet][sta]["chanperf"][chan][v["time"]] = v["perf"]
Example #8
    def send(self):
        """Send network report to appropriate e-mail addresses.

        Behaviour:
        Send network report to appropriate e-mail addresses via SMTP
        server

        """
        import sys
        import os

        sys.path.append("%s/data/python" % os.environ["ANTELOPE"])
        from antelope.stock import epoch2str, now

        sys.path.remove("%s/data/python" % os.environ["ANTELOPE"])
        import smtplib

        sender = "auto_qc-noreply@%s" % self.smtp_server
        from_line = "From: %s\n" % sender
        to_line = "To: %s" % self.email[0]
        for rec in self.email[1:]:
            to_line = "%s, %s " % (to_line, rec)
        to_line = "%s\n" % to_line
        subject_line = "Subject: auto_qc network report for %s %s\n" % (
            self.network,
            epoch2str(now(), "%m/%d/%Y"),
        )
        message = "%s%s%s%s" % (from_line, to_line, subject_line, self.summarize())
        try:
            smtpObj = smtplib.SMTP(self.smtp_server)
            smtpObj.sendmail(sender, self.email, message)
            print("Network summary successfully sent.")
        except smtplib.SMTPException:
            print("Error: unable to send e-mail.\n\n")
            print(self.summarize())
Example #9
def logmsg(message, forced=False):

    if message is None: message = ''

    from __main__ import verbose
    global globalLog

    if not forced and not verbose: return

    if not isinstance(message, basestring):
        print(type(message))
        message = '\n%s\n' % pprint.pformat(message, indent=4)

    #globalLog = '%s\n%s %s' % (globalLog,stock.strtime(stock.now()), message)
    globalLog += '%s %s\n' % (stock.strtime(stock.now()), message)
    print('%s %s' % (stock.strtime(stock.now()), message))
Example #10
    def _clean_cache(self, cache):
        """Reinitilize the collection."""
        results = []

        for snet in cache:
            if not snet:
                continue
            for sta in cache[snet]:
                if not sta:
                    continue
                # Stringify the dict. This will avoid losing decimal places
                oldEntry = json.loads(json.dumps(cache[snet][sta]))

                # Generic id for this entry
                oldEntry["id"] = snet + "_" + sta
                oldEntry["dlname"] = snet + "_" + sta

                if "snet" not in oldEntry:
                    oldEntry["snet"] = snet
                if "sta" not in oldEntry:
                    oldEntry["sta"] = sta

                # add entry for autoflush index and IM checks
                oldEntry["lddate"] = datetime.fromtimestamp(stock.now())
                results.append(oldEntry)

        return results
Example #11
    def _get_chanperf(self):

        self.logger.debug("_get_chanperf()")

        today = stock.str2epoch(str(stock.yearday(stock.now())))
        lastmonth = today - (86400 * int(self.perf_days_back))

        fields = ["snet", "sta", "chan", "time", "perf"]
        steps = [
            "dbopen chanperf",
            "dbjoin -o snetsta",
            "dbsubset time >= %s" % lastmonth,
        ]

        if self.perf_subset:
            steps.append("dbsubset %s" % self.perf_subset)

        for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
            snet = v.pop("snet")
            sta = v.pop("sta")
            chan = v.pop("chan")

            self.logger.debug("_get_chanperf(%s_%s)" % (snet, sta))

            if self._verify_cache(snet, sta, "chanperf"):
                try:
                    if len(self.cache[snet][sta]["chanperf"][chan]) < 1:
                        raise
                except Exception:
                    self.cache[snet][sta]["chanperf"][chan] = {}

                # v['time'] = readable_time( v['time'], '%Y-%m-%d' )
                v["time"] = int(v["time"])
                self.cache[snet][sta]["chanperf"][chan][v["time"]] = v["perf"]
Example #12
    def run(self):
        """Run second_moment App."""

        # Create any necessary folders.
        self._setup_folders()

        # set path of matlab script
        sys.path.append(self.matlab_code_path)
        self.matlab_code = self.matlab_code_path + "/" + "run_second_moment.m"

        # -- Set matlab info
        self.logger.info("Start: %s %s" %
                         ("second_moment", stock.strtime(stock.now())))
        self.logger.info("Start: configuration parameter file %s" %
                         self.options.pf)
        self.logger.info(" - Xvfb path: %s" % self.xvfb_path)
        self.logger.info(" - Matlab path: %s" % self.matlab_path)
        self.logger.info(" - Matlab flags: %s" % self.matlab_flags)

        # Set virtual display if needed
        if not self.options.window and self.xvfb_path:
            result = self._setup_xvfb()
            if not result:
                logger.error("Xvfb setup failed. Can't continue.")
                return -1

        self._run_matlab()

        # -- Kill virtual display if needed -- #
        if not self.options.window:
            self._kill_xvfb()
Example #13
    def send(self):
        """Send network report to appropriate e-mail addresses.

        Behaviour:
        Send network report to appropriate e-mail addresses via SMTP
        server

        """
        import sys
        import os
        sys.path.append('%s/data/python' % os.environ['ANTELOPE'])
        from antelope.stock import epoch2str, now
        sys.path.remove('%s/data/python' % os.environ['ANTELOPE'])
        import smtplib
        sender = 'auto_qc-noreply@%s' % self.smtp_server
        from_line = 'From: %s\n' % sender
        to_line = 'To: %s' % self.email[0]
        for rec in self.email[1:]:
            to_line = '%s, %s ' % (to_line, rec)
        to_line = '%s\n' % to_line
        subject_line = 'Subject: auto_qc network report for %s %s\n' % \
            (self.network, epoch2str(now(),'%m/%d/%Y'))
        message = '%s%s%s%s' % (from_line, to_line, subject_line,
            self.summarize())
        try:
            smtpObj = smtplib.SMTP(self.smtp_server)
            smtpObj.sendmail(sender, self.email, message)
            print('Network summary successfully sent.')
        except smtplib.SMTPException:
            print('Error: unable to send e-mail.\n\n')
            print(self.summarize())
Example #14
    def set(self, pckt, time):
        """Write values to a statefile."""

        self.logger.debug("set %s to %s" % (self.filename, pckt))

        if not self.filename:
            return

        self.packet = pckt
        self.time = time
        self.strtime = stock.strlocalydtime(time).strip()
        self.latency = stock.strtdelta(stock.now() - time).strip()

        # self.logger.debug( 'latency: %s' % self.latency )

        try:
            self.pointer.seek(0)
            self.pointer.write(
                "%s\n%s\n%s\n%s\n%s\n"
                % (self.packet, self.time, self.strtime, self.latency, self.pid)
            )
        except Exception as e:
            raise stateFileException(
                "Problems while writing to state file: %s %s" % (self.file, e)
            )
Example #15
    def max_time(self):
        """Get time of last wfdisc sample."""

        if self.maxtime == -1:
            return stock.now()

        return self.maxtime
Example #16
def logmsg(message, forced=False):

    if message is None:
        message = ""

    from __main__ import verbose

    global globalLog

    if not forced and not verbose:
        return

    if not isinstance(message, string_types):
        message = "\n%s\n" % pprint.pformat(message, indent=4)

    globalLog += "%s %s\n" % (stock.strtime(stock.now()), message)
    print("%s %s" % (stock.strtime(stock.now()), message))
Example #17
    def _get_orb_sta_latency(self, name):
        """
        Look for all CLIENTS and SOURCES.
        """

        self.logging.debug("Check ORB(%s) sources" % name)

        pkt = Pkt.Packet()

        self.orbs[name]["orb"].select(self.orb_select)
        self.orbs[name]["orb"].reject(".*/pf.*|.*/log|/db/.*|.*/MSTC")

        self.orbs[name]["info"]["status"] = "online"
        self.orbs[name]["info"]["last_check"] = stock.now()

        # get clients
        self.logging.debug("get clients orb(%s)" % name)
        result = self.orbs[name]["orb"].clients()

        for r in result:
            if isinstance(r, float):
                self.orbs[name]["info"]["clients_time"] = r
                self.logging.debug("orb(%s) client time %s" % (name, r))
            else:
                self.orbs[name]["clients"] = r

        # get sources
        self.logging.debug("get sources orb(%s)" % name)
        result = self.orbs[name]["orb"].sources()

        for r in result:
            # Verify if this is a valid field or just the reported time
            if isinstance(r, float):
                self.orbs[name]["info"]["sources_time"] = r
                self.logging.debug("orb(%s) sources time %s" % (name, r))
            else:
                for stash in r:
                    srcname = stash["srcname"]
                    pkt.srcname = Pkt.SrcName(srcname)
                    snet = pkt.srcname.net
                    sta = pkt.srcname.sta

                    # Not sure if this will ever occur
                    if not snet or not sta:
                        continue

                    self.logging.debug("orb(%s) update %s %s" % (name, snet, sta))

                    self._verify_cache(snet, sta, "orb", primary=True)

                    self.cache[snet][sta]["orb"][srcname] = parse_sta_time(stash["slatest_time"])

                    if not "lastpacket" in self.cache[snet][sta]:
                        self.cache[snet][sta]["lastpacket"] = 0

                    if self.cache[snet][sta]["lastpacket"] < self.cache[snet][sta]["orb"][srcname]:
                        self.cache[snet][sta]["lastpacket"] = self.cache[snet][sta]["orb"][srcname]
Example #18
    def max_time(self):
        """
        Get time of last wfdisc sample
        """

        if self.maxtime == -1:
            return stock.now()

        return self.maxtime
Example #19
    def __call__(self, time=None):
        """
        Intercept data requests.

        time defaults to "now", evaluated at call time.
        """

        if time is None:
            time = stock.now()

        try:
            time = float(time)
        except Exception:
            print("\n*dbcentral*: dbcentral() => error in time=>[%s] %s" %
                  (time, time.__class__))
Example #20
    def __call__(self, time=None):
        """
        Intercept data requests.

        time defaults to "now", evaluated at call time.
        """

        if time is None:
            time = stock.now()

        try:
            time = float(time)
        except Exception:
            print("\n*Dbcentral*: Dbcentral() => error in time=>[%s] %s" %
                  (time, time.__class__))
Example #21
    def update( self ):
        self.logging.info( 'Update orb serials' )

        if isinstance(self.orblist, collections.Iterable):
            for orb in self.orblist:
                self._get_orb_data( orb )

            self.last_update = int( stock.now() )

        else:
            self.logging.error( 'ORBLIST not iterable: ' + str(self.orblist) )
Example #22
def parse_sta_time(time, nullval="-"):
    """Verify that we have a valid time, not in the future."""

    try:
        if float(time) < 1.0:
            raise
        if float(time) > stock.now():
            raise
        return int(float(time))
    except Exception:
        return nullval
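For reference, the same validation logic runs standalone if time.time() stands in for Antelope's stock.now(); the sketch below is illustrative, not output from the module itself.

import time


def parse_sta_time_sketch(t, nullval="-"):
    # Return int epoch seconds, or nullval for zero, negative, or future times.
    try:
        if float(t) < 1.0:
            raise ValueError("zero or negative time")
        if float(t) > time.time():  # stand-in for stock.now()
            raise ValueError("time is in the future")
        return int(float(t))
    except Exception:
        return nullval


print(parse_sta_time_sketch("1704067200.5"))    # 1704067200
print(parse_sta_time_sketch(0))                 # '-'
print(parse_sta_time_sketch(time.time() + 60))  # '-' (future)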
Example #23
    def stadates(self, start=False, end=False):
        """
        Determine start and end times for a station.

        Get list of valid dates
        """

        if not start:
            return self.stachan_cache.keys()

        cache = {}

        if not end:
            end = stock.now()
        if start > end:
            end = stock.now()
        start = float(start)
        end = float(end)

        for sta in self.stachan_cache:
            for chan in self.stachan_cache[sta]:
                for date in self.stachan_cache[sta][chan]["dates"]:

                    if date[1] == -1:

                        if date[0] <= start:
                            cache[sta] = 1
                        if date[0] <= end:
                            cache[sta] = 1

                    else:

                        if date[0] <= start <= date[1]:
                            cache[sta] = 1
                        if date[0] <= end <= date[1]:
                            cache[sta] = 1
                        if start <= date[0] and date[1] <= end:
                            cache[sta] = 1

        self.logger.info("cache.keys: %s", str(cache.keys()))
        return cache.keys()
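The three range tests above implement a standard interval-overlap check between the query window [start, end] and each station epoch; a minimal standalone sketch with invented epochs:

def window_overlaps(date, start, end):
    # True if [start, end] intersects the station's [on, off] range.
    # An off time of -1 means the station is still running (open-ended).
    on, off = date
    if off == -1:
        return on <= end
    return (on <= start <= off or
            on <= end <= off or
            (start <= on and off <= end))


print(window_overlaps((100.0, 200.0), 150.0, 300.0))  # True: ranges intersect
print(window_overlaps((100.0, -1), 150.0, 300.0))     # True: still running
print(window_overlaps((100.0, 120.0), 150.0, 300.0))  # False: ended earlier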
Example #24
def per_sta_query(flickr, staname, tags, myid, archive, url_path):
    """Create a subprocess
    for the selected station

    """

    logmsg("Start %s at %s" % (staname, stock.strtime(stock.now())))

    try:
        flickr_photo_retrieval(flickr, staname, tags, myid, archive, url_path)
    except Exception as e:
        logerror("%s execution failed: %s" % (staname, e))
Example #25
    def update(self):
        """Update the ORBSerials collection."""
        self.logger.info("Update orb serials")

        if isinstance(self.orblist, collections.Iterable):
            for orbname in self.orblist:
                self._get_orb_data(orbname)

            self.last_update = int(stock.now())

        else:
            self.logger.error("ORBLIST not iterable: " + str(self.orblist))
Example #26
    def _get_detections(self):
        '''
        Open detection table and get all data for current event.

        TODO: Deal with mopping up of detections not associated with events
        '''
        if not self.table_present['detection']:
            return

        start = int(stock.now())
        end = 0

        self.logger.debug('Basing detection window on arrivals for evid [%d]'
                          % self.evid)
        for orid in self.origins.keys():
            arrivals = self.all_arrivals(orid=orid)
            for arrival in arrivals:

                if 'arrival.time' in arrival:
                    time = arrival['arrival.time']
                else:
                    time = start

                if 'arrival.deltim' in arrival:
                    deltim = arrival['arrival.deltim']
                else:
                    deltim = 0

                if time - deltim < start:
                    start = time - deltim

                if time + deltim > end:
                    end = time + deltim

        if end > start:
            self.logger.debug('Windowing detections from %s to %s'
                              % (stock.epoch2str(start, '%G %T')[:-4],
                                 stock.epoch2str(end, '%G %T')[:-4]))
            steps = ['dbopen detection']
            steps += ['dbsubset time>%s && time<%s' % (start, end)]
            steps += ['dbsubset state=~/%s/' % state
                      for state in self.detection_state_select]
            steps += ['dbsubset state!~/%s/' % state
                      for state in self.detection_state_reject]
            steps += self._seed_channel_steps()

            self.detections.get_view(steps)

        else:
            self.logger.warning(
                'Calculated time-window for detections is not valid: [%s,%s]' %
                (start, end))
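The window above grows [start, end] by each arrival's time ± deltim; the core arithmetic in isolation, with fabricated arrivals (the real code seeds start from stock.now() and falls back to deltim = 0 when the field is absent):

arrivals = [
    {"arrival.time": 1000.0, "arrival.deltim": 0.5},
    {"arrival.time": 1012.0},                  # no deltim: treated as 0
    {"arrival.time": 998.5, "arrival.deltim": 1.0},
]

start = float("inf")   # the class uses int(stock.now()) as the seed
end = 0.0

for arrival in arrivals:
    t = arrival["arrival.time"]
    deltim = arrival.get("arrival.deltim", 0)
    start = min(start, t - deltim)
    end = max(end, t + deltim)

print(start, end)  # 997.5 1012.0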
Example #27
def per_sta_query(flickr, staname, tags, myid, archive, url_path):
    """Create a subprocess
    for the selected station

    """


    logmsg("Start %s at %s" % (staname,stock.strtime(stock.now())) )

    try:
        flickr_photo_retrieval(flickr, staname, tags, myid, archive, url_path)
    except Exception as e:
        logerror("%s execution failed: %s" % (staname, e))
Example #28
def parse_sta_time(time, nullval="-"):
    """
    Verify that we have a valid time. Not in future.
    """

    try:
        if float(time) < 1.0:
            raise
        if float(time) > stock.now():
            raise
        return int(float(time))
    except Exception:
        return nullval
Example #29
    def _extract_data(self):
        """Look for all poc packets."""

        self.orb["last_check"] = stock.now()

        if self.errors > MAX_EXTRACT_ERRORS:
            raise TooManyExtractError("10 consecutive errors on orb.reap()")

        try:
            self.poc.new(self.orb["orb"].reap(self.reap_wait))
        except orb.OrbIncompleteException:
            self.logging.debug("OrbIncompleteException orb.reap(%s)" %
                               self.orbname)
            return True
        except Exception as e:
            self.logging.warning("%s Exception in orb.reap(%s) [%s]" %
                                 (Exception, self.orbname, e))
            self.errors += 1
            return False
        else:
            # reset error counter
            self.errors = 0
            # save ORB id to state file
            self.state.set(self.poc.id, self.poc.time)

        if self.poc.valid:
            self.logging.info("%s" % self.poc)
            # we print this on the statusFile class too...
            self.logging.debug("orblatency %s" %
                               (stock.strtdelta(stock.now() - self.poc.time)))
            self.position = self.poc.id
            self.logging.debug("orbposition %s" % self.position)
            self.orb["last_success"] = stock.now()

            self._update_collection()

        return True
Example #30
    def _extract_data(self):
        """
        Look for all poc packets
        """

        self.orb['last_check'] = stock.now()

        if self.errors > 10:
            raise pocException('10 consecutive errors on orb.reap()')

        try:
            self.poc.new( self.orb['orb'].reap(self.reap_wait) )
        except orb.OrbIncompleteException:
            self.logging.debug("OrbIncompleteException orb.reap(%s)" % self.orbname)
            return True
Example #31
def parse_sta_date(time, epoch=False, nullval="-"):
    """Verify that we have a valid ondate/offdate."""

    try:
        if float(time) < 1.0:
            raise
        if stock.epoch(int(time)) > stock.now():
            raise

        if epoch:
            return stock.epoch(int(time))
        else:
            return int(time)

    except Exception:
        return nullval
Example #32
    def set(self, pckt, time):
        if not self.filename: return

        self.packet = pckt
        self.time = time
        self.strtime = stock.strlocalydtime(time).strip()
        self.latency = stock.strtdelta( stock.now()-time ).strip()

        #self.logging.debug( 'Orb latency: %s' % self.latency )

        try:
            self.pointer.seek(0)
            self.pointer.write( '%s\n%s\n%s\n%s\n%s\n' % \
                    (self.packet,self.time,self.strtime,self.latency,self.pid) )
        except Exception as e:
            raise pocException( 'Problems while writing to state file: %s %s' % (self.filename, e) )
Example #33
def clean_cache_object(cache, id="dlname"):
    """
    Prepare memory dictionary for injection of data into
    a MongoDb structure. We have several requirements:
    1) Base key "dlname" on every element. Unless "id" is defined.
    2) All data should be convertible by json.load()
    3) Base key "time" should be present. This is the time of the data.
    We will create a new key "id" for our returned object. This
    will be unique and if objects repeat in the cache then the
    function will silently overwrite previous entries. We append
    a new key "lddate" with the time of the object creation.
    All data returned should be strings and could be sent directly
    to MongoDB.
    """

    logging = getLogger()

    logging.info("clean_cache_object(%s)" % id)

    results = []

    for entry in cache:
        if id not in entry:
            continue

        # Convert to JSON then back to dict to stringify numeric keys
        entry = json.loads(json.dumps(entry))

        try:
            # Try to find object for id
            if id != "id":
                entry["id"] = entry[id]
        except Exception:
            # Generic id for this entry
            entry["id"] = len(results)

        # add entry for autoflush index
        entry["time_obj"] = datetime.fromtimestamp(entry["time"])

        # add entry for last load of entry
        entry["lddate"] = datetime.fromtimestamp(stock.now())

        results.append(entry)

    return results
Example #34
def parse_sta_date(time, epoch=False, nullval="-"):
    """
    Verify that we have a valid ondate/offdate.
    """

    try:
        if float(time) < 1.0:
            raise
        if stock.epoch(int(time)) > stock.now():
            raise

        if epoch:
            return stock.epoch(int(time))
        else:
            return int(time)

    except Exception:
        return nullval
Example #35
def clean_cache_object(cache, id="dlname"):
    """Clean db2mongo cache data.

    Prepare memory dictionary for injection of data into
    a MongoDb structure. We have several requirements:
        1) Base key "dlname" on every element. Unless "id" is defined.
        2) All data should be convertible by json.load()
        3) Base key "time" should be present. This is the time of the data.

    We create a new key "id" for our returned object. This is unique
    and if objects repeat in the cache then the function will silently
    overwrite previous entries. We append a new key "lddate" with the time of
    the object creation.  All data returned should be strings safe to be sent
    directly to MongoDB.
    """

    logger.info("clean_cache_object(%s)" % id)

    results = []

    for entry in cache:
        if id not in entry:
            continue

        # Convert to JSON then back to dict to stringify numeric keys
        entry = json.loads(json.dumps(entry))

        try:
            # Try to find object for id
            if id != "id":
                entry["id"] = entry[id]
        except Exception:
            # Generic id for this entry
            entry["id"] = len(results)

        # add entry for autoflush index
        entry["time_obj"] = datetime.fromtimestamp(entry["time"])

        # add entry for last load of entry
        entry["lddate"] = datetime.fromtimestamp(stock.now())

        results.append(entry)

    return results
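A condensed, self-contained re-sketch of that contract, exercised with fabricated entries; datetime.now() stands in for stock.now() and the logger is dropped:

import json
from datetime import datetime


def clean_cache_sketch(cache, id="dlname"):
    results = []
    for entry in cache:
        if id not in entry:
            continue                            # requirement 1: key must exist
        entry = json.loads(json.dumps(entry))   # requirement 2: JSON-safe
        entry["id"] = entry[id]                 # unique id; repeats overwrite
        entry["time_obj"] = datetime.fromtimestamp(entry["time"])  # req. 3
        entry["lddate"] = datetime.now()        # stand-in for stock.now()
        results.append(entry)
    return results


cache = [
    {"dlname": "TA_O53A", "time": 1704067200, "value": 10},
    {"time": 1704067260, "value": 11},          # dropped: no "dlname"
]
for doc in clean_cache_sketch(cache):
    print(doc["id"], doc["time_obj"], doc["value"])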
Example #36
    def start_daemon(self):
        """Run in a daemon mode."""

        self.logger.debug("Update ORB cache")

        self.logger.debug(self.orbname)

        if not self.orbname or not isinstance(self.orbname, str):
            raise LookupError("Problems with orbname [%s]" % (self.orbname))

        # Expand the object if needed
        if not self.orb:
            self.logger.debug("orb.Orb(%s)" % (self.orbname))
            self.orb = {}
            self.orb["orb"] = None
            self.orb["status"] = "offline"
            self.orb["last_success"] = 0
            self.orb["last_check"] = 0

        self._connect_to_orb()

        while True:
            # Reset the connection if no packets in reap_timeout window
            self.logger.debug("starting next reap cycle")
            if (self.orb["last_success"] and self.reap_timeout and (
                (stock.now() - self.orb["last_success"]) > self.reap_timeout)):
                self.logger.warning("Possible stale ORB connection %s" %
                                    self.orbname)
                if stock.yesno(self.timeout_exit):
                    break
                else:
                    self._connect_to_orb()

            self.logger.debug("calling extract_data")
            if self._extract_data():
                self.logger.debug("Success on extract_data(%s)" %
                                  (self.orbname))
            else:
                self.logger.warning("Problem on extract_data(%s)" %
                                    (self.orbname))

        self.orb["orb"].close()

        return 0
Example #37
    def _get_detections(self):
        """
        Open detection table and get all data for a particular ORID.
        Save the origin parameters in memory.
        """

        start = int(stock.now())
        end = 0

        # Calculate time window for event
        for orid in self.origins.keys():
            self.logging.info("Get arrivals for orid %s" % orid)
            for arrival in self.all_arrivals_orid(orid):

                try:
                    if int(arrival["arrival.time"]) < start:
                        start = int(arrival["arrival.time"])

                    if int(arrival["arrival.time"]) > end:
                        end = int(arrival["arrival.time"])
                except Exception:
                    pass

        if end > start:
            # We have a good time window. Let's get the entries.
            steps = ["dbopen detection"]
            steps.extend(["dbsubset time > %s && time < %s" % (start, end)])
            steps.extend(["dbjoin -o snetsta"])
            steps.extend(["dbjoin -o sitechan"])
            [
                steps.extend(["dbsubset state =~ /%s/" % x])
                for x in self.detection_state_select
                if self.detection_state_select
            ]
            [
                steps.extend(["dbsubset state !~ /%s/" % x])
                for x in self.detection_state_reject
                if self.detection_state_reject
            ]

            self.detections.get_view(steps)

        else:
            self.logging.warning("Calculated time-window for detections is not valid: [%s,%s]" % (start, end))
Example #38
    def start_daemon(self):
        """
        Look into every ORB listed on the parameter file
        and track some information from them.
        """

        self.logging.debug( "Update ORB cache" )

        self.logging.debug( self.orbname )

        if not self.orbname or not isinstance(self.orbname, str):
            raise LookupError( "Problems with orbname [%s]" % (self.orbname) )

        # Expand the object if needed
        if not self.orb:
            self.logging.debug( "orb.Orb(%s)" % (self.orbname) )
            self.orb = {}
            self.orb['orb'] = None
            self.orb['status'] = 'offline'
            self.orb['last_success'] = 0
            self.orb['last_check'] = 0

        self._connect_to_orb()

        while True:
            # Reset the connection if no packets in reap_timeout window
            if self.orb['last_success'] and self.reap_timeout and \
                    ( (stock.now() - self.orb['last_success']) > self.reap_timeout):
                self.logging.warning('Possible stale ORB connection %s' % self.orbname)
                if stock.yesno(self.timeout_exit):
                    break
                else:
                    self._connect_to_orb()

            if self._extract_data():
                self.logging.debug( "Success on extract_data(%s)" % (self.orbname) )
                pass
            else:
                self.logging.warning( "Problem on extract_data(%s)" % (self.orbname) )

        self.orb['orb'].close()

        return 0
Example #39
    def __call__(self, time=None):
        """
        Intercept data requests.

        time defaults to "now", evaluated at call time.
        """

        if time is None:
            time = stock.now()

        try:
            time = float(time)
        except Exception:
            print("\n*Dbcentral*: Dbcentral() => error in time=>[%s] %s" %
                  (time, time.__class__))
        else:
            for element in sorted(self.dbs):
                start = self.dbs[element]["times"][0]
                end = self.dbs[element]["times"][1]
                if start < time < end:
                    return element

        raise DbcentralException("No db match for time=>[%s]" % time)
Example #40
    def run_forever(self):
        """Track POC packets from orbservers."""

        self.logging.debug("Update ORB cache")

        self.logging.debug(self.orbname)

        if not self.orbname:
            raise ConfigurationError("orbname is missing [%s]" %
                                     (self.orbname))

        # Create the orbserver state tracking dict if needed
        if not self.orb:
            self.logging.debug("orb.Orb(%s)" % (self.orbname))
            self.orb = {}
            self.orb["orb"] = None
            self.orb["status"] = "offline"
            self.orb["last_success"] = 0
            self.orb["last_check"] = 0

        self._connect_to_orb()

        while True:
            # Reset the connection if no packets in reap_timeout window
            if (self.orb["last_success"] and self.reap_timeout and (
                (stock.now() - self.orb["last_success"]) > self.reap_timeout)):
                self.logging.warning("Possible stale ORB connection %s" %
                                     self.orbname)
                if stock.yesno(self.timeout_exit):
                    break
                else:
                    self._connect_to_orb()

            if self._extract_data():
                # self.logging.debug( "Success on extract_data(%s)" % (self.orbname) )
                pass
            else:
                self.logging.warning("Problem on extract_data(%s)" %
                                     (self.orbname))
                self._connect_to_orb()

        self.orb["orb"].close()
Example #41
    def _clean_cache(self, cache):

        results = []

        for entry in cache:
            # Convert to JSON then back to dict to stringify numeric keys
            entry = json.loads(json.dumps(entry))

            # Generic id for this entry
            entry["id"] = entry["evid"]

            # add entry for autoflush index
            entry["time_obj"] = datetime.fromtimestamp(entry["time"])

            # add entry for last load of entry
            entry["lddate"] = datetime.fromtimestamp(stock.now())

            results.append(entry)

        return results
Example #42
    def _clean_cache(self, cache):

        results = []

        for entry in cache:
            # Convert to JSON then back to dict to stringify numeric keys
            entry = json.loads( json.dumps( entry ) )

            # Generic id for this entry
            entry['id'] = entry['evid']

            # add entry for autoflush index
            entry['time_obj'] = datetime.fromtimestamp( entry['time'] )

            # add entry for last load of entry
            entry['lddate'] = datetime.fromtimestamp( stock.now() )

            results.append( entry )

        return results
Example #43
    def _get_detections(self):
        """
        Open detection table and get all data for a particular ORID.
        Save the origin parameters in memory.
        """

        start = int(stock.now())
        end = 0

        # Calculate time window for event
        for orid in self.origins.keys():
            self.logging.info('Get arrivals for orid %s' % orid )
            for arrival in self.all_arrivals_orid( orid ):

                try:
                    if int(arrival['arrival.time']) < start:
                        start = int(arrival['arrival.time'])

                    if int(arrival['arrival.time']) > end:
                        end = int(arrival['arrival.time'])
                except Exception:
                    pass


        if end > start:
            # We have a good time window. Let's get the entries.
            steps = ['dbopen detection']
            steps.extend(['dbsubset time > %s && time < %s' % (start, end) ])
            steps.extend(['dbjoin -o snetsta'])
            steps.extend(['dbjoin -o sitechan'])
            [steps.extend(['dbsubset state =~ /%s/' % x])
             for x in self.detection_state_select if self.detection_state_select]
            [steps.extend(['dbsubset state !~ /%s/' % x])
             for x in self.detection_state_reject if self.detection_state_reject]

            self.detections.get_view( steps )

        else:
            self.logging.warning(
                'Calculated time-window for detections is not valid: [%s,%s]'
                % (start, end))
Example #44
    def _extract_data(self):
        """Reap data from orb packets."""

        self.orb["last_check"] = stock.now()

        if self.errors > MAX_EXTRACT_ERRORS:
            raise TooManyOrbExtractErrors(
                "%s consecutive errors on orb.reap()" % MAX_EXTRACT_ERRORS)

        try:
            # REAP new packet from ORB
            # Squelch RuntimeWarnings from the Antelope layer
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                pktbuf = self.orb["orb"].reap(self.reap_wait)
                # Extract packet into our internal object
                self.packet.new(pktbuf)

        except orb.OrbIncompleteException as e:
            self.logger.debug("Caught OrbIncompleteException: " + str(e),
                              exc_info=True)
            self.errors = 0
            return True

        except stock.PfException:
            self.logger.exception("Couldn't read from pf packet.")
            self.errors += 1
            return False

        except Exception:
            self.logger.exception(
                "Unknown Exception occurred while extracting data(%s)" %
                (self.orbname))
            self.errors += 1
            return False

        self.logger.debug("_extract_data(%s,%s)" %
                          (self.packet.id, self.packet.time))

        # reset error counter
        self.errors = 0

        if not self.packet.id or not self.packet.valid:
            self.logger.debug("_extract_data() => Not a valid packet")
            return False

        # save ORB id to state file
        self.state.set(self.packet.id, self.packet.time)

        self.logger.debug("errors:%s" % self.errors)

        if self.packet.valid:
            self.logger.debug("%s" % self.packet)
            # we print this on the statusFile class too...
            self.logger.debug(
                "orblatency %s" %
                (stock.strtdelta(stock.now() - self.packet.time)))
            self.position = self.packet.id
            self.logger.debug("orbposition %s" % self.position)
            self.orb["last_success"] = stock.now()

            self._update_collection()
        else:
            self.logger.debug("invalid packet: %s %s" %
                              (self.packet.id, self.packet.srcname))
            return False

        return True
Example #45
            return True
        except Exception as e:
            self.logging.warning("%s Exception in orb.reap(%s) [%s]" %
                                 (Exception, self.orbname, e))
            self.errors += 1
            return False
        else:
            # reset error counter
            self.errors = 0
            # save ORB id to state file
            self.state.set(self.poc.id,self.poc.time)

        if self.poc.valid:
            self.logging.info( '%s' % self.poc )
            # we print this on the statusFile class too...
            self.logging.debug( 'orblatency %s' % \
                    ( stock.strtdelta( stock.now() - self.poc.time ) ) )
            self.position = self.poc.id
            self.logging.debug( 'orbposition %s' % self.position )
            self.orb['last_success'] = stock.now()

            self._update_collection()

        return True

    def _update_collection(self):

        self.logging.info( 'update_collection()' )

        # Verify if we need to update MongoDB
        if self.poc.valid:
            self.logging.debug('collection.update(%s)' % self.poc.sn)
Example #46
    def pull_data(self):
        """Get data from the MongoDB instance."""

        if not self.valid:
            self.logger.debug("*** INVALID INSTANCE *** %s" % self.name)
            return False

        if float(self.read_position) < 0.0:
            ignore_history = True
            logSeqNo = 0
            seqNo = 0
        else:
            ignore_history = False

            # ID has 2 parts. Need to split them.
            try:
                temp = str(self.read_position).split(".")
            except Exception:
                temp = [int(self.read_position)]

            if len(temp) == 2:
                logSeqNo = int(temp[0])
                seqNo = int(temp[1])
            else:
                logSeqNo = int(self.read_position)
                seqNo = False

        # Get all documents with id equal to or greater than last successful id...
        for post in sorted(
                self.collection.find({"messageLogSeqNo": {
                    "$gte": logSeqNo
                }}),
                key=lambda x: (x["messageLogSeqNo"], x["seqNo"]),
        ):

            try:
                # In case we bring an older seqNo document...
                if logSeqNo == post["messageLogSeqNo"] and seqNo:
                    if seqNo >= post["seqNo"]:
                        self.logger.debug(
                            "Skipping processed packet %s.%s" %
                            (post["messageLogSeqNo"], post["seqNo"]))
                        continue
            except Exception as e:
                self.logger.warning("Invalid document: %s: %s" %
                                    (Exception, e))
                continue

            # self.logger.notify( post )
            self.packet.new(
                post,
                name_type=self.pckt_name_type,
                select=self.mongo_select,
                reject=self.mongo_reject,
                silent=self.silent_pkt_fail,
            )

            if not self.packet.valid:
                continue

            # save packet id to state file
            self.state.set(self.packet.id, self.packet.time)

            # track last id
            self.read_position = self.packet.id

            if ignore_history:
                self.logger.info("Skip. Ignoring old packets.")
                continue

            if not self.packet.valid:
                self.logger.warning("*** SKIP - INVALID PACKET ***")
                continue

            # Test connection. Reset if missing
            if not self._test_orb():
                self._connect_to_orb()

            self.logger.debug("Put new packet in orb: %s" % self.packet.pkt)
            pkttype, pktbuf, srcname, time = self.packet.pkt.stuff()
            self.packet.orbid = self.orb["orb"].putx(srcname, self.packet.time,
                                                     pktbuf)

        self.lastread = stock.now()
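The two-part read position handled above ("messageLogSeqNo.seqNo") can be parsed in isolation; a sketch with invented positions, mirroring the branches in pull_data:

def split_position(read_position):
    # Split "logSeqNo.seqNo"; seqNo is False when only one part is present.
    if float(read_position) < 0.0:
        return 0, 0, True                # ignore_history: replay everything
    parts = str(read_position).split(".")
    if len(parts) == 2:
        return int(parts[0]), int(parts[1]), False
    return int(read_position), False, False


print(split_position("15.342"))  # (15, 342, False)
print(split_position("15"))      # (15, False, False)
print(split_position(-1))        # (0, 0, True)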
Example #47
    def _get_orb_sta_latency(self, name):
        """Get client and source orb latencies."""

        self.logger.debug("Check ORB(%s) sources" % name)

        pkt = Pkt.Packet()

        self.orbs[name]["orb"].select(self.orb_select)
        self.orbs[name]["orb"].reject(".*/pf.*|.*/log|/db/.*|.*/MSTC")

        self.orbs[name]["info"]["status"] = "online"
        self.orbs[name]["info"]["last_check"] = stock.now()

        # get clients
        self.logger.debug("get clients orb(%s)" % name)
        result = self.orbs[name]["orb"].clients()

        for r in result:
            if isinstance(r, float):
                self.orbs[name]["info"]["clients_time"] = r
                self.logger.debug("orb(%s) client time %s" % (name, r))
            else:
                self.orbs[name]["clients"] = r

        # get sources
        self.logger.debug("get sources orb(%s)" % name)
        result = self.orbs[name]["orb"].sources()

        for r in result:
            # Verify if this is a valid field or just the reported time
            if isinstance(r, float):
                self.orbs[name]["info"]["sources_time"] = r
                self.logger.debug("orb(%s) sources time %s" % (name, r))
            else:
                for stash in r:
                    srcname = stash["srcname"]
                    pkt.srcname = Pkt.SrcName(srcname)
                    snet = pkt.srcname.net
                    sta = pkt.srcname.sta

                    # Not sure if this will ever occur
                    if not snet or not sta:
                        continue

                    self.logger.debug("orb(%s) update %s %s" % (name, snet, sta))

                    self._verify_cache(snet, sta, "orb", primary=True)

                    self.cache[snet][sta]["orb"][srcname] = parse_sta_time(
                        stash["slatest_time"]
                    )

                    if "lastpacket" not in self.cache[snet][sta]:
                        self.cache[snet][sta]["lastpacket"] = 0

                    if (
                        self.cache[snet][sta]["lastpacket"]
                        < self.cache[snet][sta]["orb"][srcname]
                    ):
                        self.cache[snet][sta]["lastpacket"] = self.cache[snet][sta][
                            "orb"
                        ][srcname]
Example #48
    def render_uri(self, request):

        #
        # Clean and prep vars
        #
        response_data = {}
        response_meta = {}

        response_meta.update( {
            "error":      'false',
            "setupEvents": self.config.event,
            "setupUI":    'false',
            "realtime":   self.config.realtime,
            #"proxy_url":  self.config.proxy_url,
            "style":      self.config.style,
            "meta_query": "false"
        } )

        #if self.config.proxy_url: response_meta['proxy'] = "'" + self.config.proxy_url + "'"

        #
        # remove all empty  elements
        # This (localhost:8008/stations/) is the same as # (localhost:8008/stations)
        #
        path = request.prepath
        while True:
            try:
                path.remove('')
            except ValueError:
                break


        # Parse all elements on the list
        query = self._parse_request(path)

        if 'precision' in request.args:
            query.update( { "precision":int(request.args['precision'][0]) })
        else:
            query.update( { "precision":1} )

        if 'period' in request.args:
            query.update( { "period":int(request.args['period'][0]) })
        else:
            query.update( { "period":0} )

        if 'median' in request.args:
            test = request.args['median'][0]
            if test.lower() in ("yes", "true", "t", "1"):
                query.update( { "median":1 } )
            else:
                query.update( { "median":0 } )
        else:
            query.update( { "median":0 } )

        if 'realtime' in request.args:
            test = request.args['realtime'][0]
            if test.lower() in ("yes", "true", "t", "1"):
                query.update( { "realtime":1 } )
            else:
                query.update( { "realtime":0 } )
        else:
            query.update( { "realtime":0 } )


        if 'filter' in request.args:
            filter = request.args['filter'][0]
            query.update( { "filter":filter.replace('_',' ') } )
        else:
            query.update( { "filter":'None' } )


        if 'calibrate' in request.args:
            test = request.args['calibrate'][0]
            if test.lower() in ("yes", "true", "t", "1"):
                query.update( { "calibrate":1 } )
            else:
                query.update( { "calibrate":0 } )
        else:
            request.args.update( { "calibrate":[self.config.apply_calib] } )
            query.update( { "calibrate":self.config.apply_calib } )


        self.logger.debug('QueryParser(): render_uri() request.prepath => path(%s)[%s]' % (len(path),path) )
        self.logger.debug('QueryParser(): render_uri() query => [%s]' % query)

        if query['data']:


                self.logger.debug('QueryParser(): render_uri() "data" query')

                if len(path) == 0:
                    #ERROR: we need one option
                    self.logger.error('QueryParser(): render_uri() ERROR: Empty "data" query!')
                    return self.uri_results(request,'Invalid data query.')


                elif path[0] == 'events':

                    """
                    Return events dictionary as JSON objects. For client ajax calls.
                    Called with or without argument.
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => events')
                    if self.config.event != 'true':
                        return self.uri_results(request,{})

                    elif len(path) == 2:
                        return self.uri_results( request, self.events(path[1]) )

                    elif len(path) == 3:
                        return self.uri_results( request, self.events.phases(path[1],path[2]) )

                    else:
                        return self.uri_results(request,self.events.table() )


                elif path[0] == 'dates':

                    """
                    Return list of yearday values for time in db
                    for all wfs in the cluster of dbs.
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => dates')

                    return self.uri_results( request, self.stations.dates() )



                elif path[0] == 'stadates':

                    """
                    Return list of yearday values for time in db
                    for all stations in the cluster of dbs.
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => dates')

                    if len(path) == 2:
                        return self.uri_results( request, self.stations.stadates(path[1]) )

                    if len(path) == 3:
                        return self.uri_results( request, self.stations.stadates(path[1],path[2]) )

                    return self.uri_results( request, self.stations.stadates() )



                elif path[0] == 'stations':

                    """
                    Return station list as JSON objects. For client ajax calls.
                    Called with argument return dictionary
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => stations')

                    if len(path) == 2:
                        return self.uri_results( request, self.stations(path[1]) )

                    return self.uri_results( request, self.stations.list() )


                elif path[0] == 'channels':

                    """
                    Return channels list as JSON objects. For client ajax calls.
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => channels')

                    if len(path) == 2:
                        stas = self.stations.convert_sta(path[1].split('|'))
                        return self.uri_results( request, self.stations.channels( stas ) )

                    return self.uri_results( request, self.stations.channels() )


                elif path[0] == 'now':

                    """
                    Return JSON object for epoch(now).
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => now')

                    return self.uri_results( request, [stock.now()] )


                elif path[0] == 'filters':

                    """
                    Return list of filters as JSON objects. For client ajax calls.
                    """

                    self.logger.debug('QueryParser(): render_uri() query => data => filters %s' % self.config.filters)

                    return self.uri_results( request, self.config.filters )


                elif path[0] == 'wf':

                    """
                    Return JSON object of data. For client ajax calls.
                    """

                    self.logger.debug(
                        "QueryParser(): render_uri(): get_data(%s))" % query)

                    return self.uri_results( request, self.get_data(query) )



                elif path[0] == 'coverage':

                    """
                    Return coverage tuples as JSON objects. For client ajax calls.
                    """
                    self.logger.debug("QueryParser(): render_uri(): Get coverage")

                    query.update( { "coverage": 1 } )

                    return self.uri_results( request, self.get_data(query) )



                else:
                    #ERROR: Unknown query type.
                    return self.uri_results( request, "Unknown query type:(%s)" % path )




        response_meta.update(self.tvals)

        if not path:
            return self.uri_results(
                request,
                self.root_template.safe_substitute(response_meta)
            )

        response_meta['meta_query'] = {}
        response_meta['meta_query']['sta'] = query['sta']
        response_meta['meta_query']['chan'] = query['chan']
        response_meta['meta_query']['time_start'] = query['start']
        response_meta['meta_query']['time_end'] = query['end']
        response_meta['meta_query']['page'] = query['page']

        if request.args:
            response_meta['setupUI'] = json.dumps(request.args)

        response_meta['meta_query'] = json.dumps( response_meta['meta_query'] )

        if path[0] == 'wf':
            return self.uri_results(
                request,
                self.root_template.safe_substitute(response_meta)
            )

        elif path[0] == 'plot':
            return self.uri_results(
                request,
                self.plot_template.safe_substitute(response_meta)
            )

        return self.uri_results( request, "Invalid query." )
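
The handler above fans a "data" query out to JSON-producing helpers (events,
stations, channels, coverage, and so on). A quick way to inspect each payload
is to request the endpoints directly; the sketch below assumes the server
listens on localhost:8008 (the address mentioned in comments elsewhere in this
code) and that these resources live under a /data prefix, both of which are
assumptions rather than guarantees made by the snippet.

# Hedged sketch only: host, port and the /data prefix are assumptions.
import json
from urllib.request import urlopen

BASE = "http://localhost:8008/data"

for endpoint in ("now", "filters", "dates", "stations", "events"):
    with urlopen("%s/%s" % (BASE, endpoint)) as resp:
        print(endpoint, json.loads(resp.read().decode()))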
Example #49
    def _set_tags(self):
        """Add quick identifier based on geo region.

        TA array expands into multiple geographical regions.
        We need to add some quick identifier to the data blob.
        """
        for snet in self.cache:
            if not snet:
                continue
            for sta in self.cache[snet]:
                if not sta:
                    continue

                self.logger.debug("_set_tags(%s_%s)" % (snet, sta))

                if self._verify_cache(snet, sta, "tags"):
                    if not self.cache[snet][sta].get("tags"):
                        self.cache[snet][sta]["tags"] = []

                    # Tags for sites on the YUKON area
                    if (
                        self.cache[snet][sta]["lat"] > 58.0
                        and self.cache[snet][sta]["lon"] > -147.0
                    ):
                        self.cache[snet][sta]["tags"].append("yukon")

                    # Tags for TA **ONLY**
                    if snet == "TA":
                        self.cache[snet][sta]["tags"].append("usarray")

                        if self.cache[snet][sta]["vnet"] == "_CASCADIA-TA":
                            self.cache[snet][sta]["tags"].append("cascadia")

                        if self.cache[snet][sta]["lat"] > 50:
                            self.cache[snet][sta]["tags"].append("alaska")
                        else:
                            self.cache[snet][sta]["tags"].append("low48")

                        # Need to identify active BGAN connections
                        bgantag = "non-bgan"
                        try:
                            for c in self.cache[snet][sta]["comm"]:
                                # active?
                                if c["endtime"] == "-":
                                    # BGAN?
                                    if c["commtype"] == "BGAN":
                                        # matched
                                        bgantag = "bgan"
                        except Exception:
                            pass

                        # Add BGAN results
                        self.cache[snet][sta]["tags"].append(bgantag)

                    # Activity tag
                    if (
                        self.cache[snet][sta]["time"] == "-"
                        or self.cache[snet][sta]["time"] > stock.now()
                    ):
                        self.cache[snet][sta]["tags"].append("prelim")
                    elif (
                        self.cache[snet][sta]["endtime"] == "-"
                        or self.cache[snet][sta]["endtime"] > stock.now()
                    ):
                        self.cache[snet][sta]["tags"].append("active")
                    else:
                        self.cache[snet][sta]["tags"].append("decommissioned")

                    # Adoption tag
                    if "adoption" in self.cache[snet][sta]:
                        self.cache[snet][sta]["tags"].append("adopted")

                    # Certification tag
                    if "cert_time" in self.cache[snet][sta]:
                        if (
                            self.cache[snet][sta]["cert_time"] == "-"
                            or self.cache[snet][sta]["cert_time"] > stock.now()
                        ):
                            self.cache[snet][sta]["tags"].append("uncertified")
                        elif (
                            self.cache[snet][sta]["decert_time"] == "-"
                            or self.cache[snet][sta]["decert_time"] < stock.now()
                        ):
                            self.cache[snet][sta]["tags"].append("certified")
                        else:
                            self.cache[snet][sta]["tags"].append("decertified")
Example #50
    def _get_event_cache(self):
        #private function to load the data from the tables

        self.logger.info("Events(): update cache")

        for dbname in self.dbcentral.list():

            self.logger.debug("Events(): dbname: %s" % dbname)

            # Get min max for wfdisc table first
            try:
                db = datascope.dbopen( dbname , 'r' )
                db = db.lookup( table='wfdisc')
                start = db.ex_eval('min(time)')
                end = db.ex_eval('max(endtime)')
                if end > stock.now():
                    end = stock.now()
                records = db.query(datascope.dbRECORD_COUNT)

            except:
                records = 0


            if records:

                if not self.start:
                    self.start = start

                elif self.start > start:
                    self.start = start

                if not self.end:
                    self.end = end

                elif self.end < end:
                    self.end = end

            try:
                db.close()
            except:
                pass

            try:
                db = datascope.dbopen( dbname , 'r' )
                db = db.lookup( table='event')
                records = db.query(datascope.dbRECORD_COUNT)

            except:
                records = 0

            if records:

                try:
                    db = db.join( 'origin' )
                    db = db.subset( 'orid == prefor' )
                except:
                    pass

            else:

                try:
                    db = db.lookup( table='origin' )
                except:
                    pass


            try:
                records = db.query(datascope.dbRECORD_COUNT)
            except:
                records = 0


            if not records:
                self.logger.error('Events(): No records to work on any table')
                continue

            self.logger.debug("Events(): origin db_pointer: [%s,%s,%s,%s]" % (
                db['database'],db['table'],db['field'],db['record']))

            try:
                db = db.subset("time > %f" % self.start)
                db = db.subset("time < %f" % self.end)
            except:
                pass

            try:
                records = db.query(datascope.dbRECORD_COUNT)
            except:
                records = 0

            if not records:
                self.logger.error('Events(): No records after time subset')
                continue

            for i in range(records):

                db.record = i

                (orid,time,lat,lon,depth,auth,mb,ml,ms,nass) = db.getv('orid','time','lat','lon','depth','auth','mb','ml','ms','nass')

                if auth == self.nulls('auth'):
                    auth = '-'

                if orid == self.nulls('orid'):
                    orid = '-'

                if time == self.nulls('time'):
                    time = '-'
                else:
                    time = "%0.2f" % time

                if lat == self.nulls('lat'):
                    lat = '-'
                else:
                    lat = "%0.2f" % lat

                if lon == self.nulls('lon'):
                    lon = '-'
                else:
                    lon = "%0.2f" % lon

                if depth == self.nulls('depth'):
                    depth = '-'
                else:
                    depth = "%0.2f" % depth

                if mb == self.nulls('mb'):
                    mb = '-'
                else:
                    mb = "%0.1f" % mb

                if ms == self.nulls('ms'):
                    ms = '-'
                else:
                    ms = "%0.1f" % ms

                if ml == self.nulls('ml'):
                    ml = '-'
                else:
                    ml = "%0.1f" % ml

                if nass == self.nulls('nass'):
                    nass = '-'
                else:
                    nass = "%d" % nass


                self.event_cache[orid] = {'time':time, 'lat':lat, 'lon':lon, 'depth':depth, 'auth':auth, 'mb':mb, 'ms':ms, 'ml':ml, 'nass':nass}

                # mb/ms/ml are formatted strings here; '-' marks a null value
                if mb != '-':
                    self.event_cache[orid]['magnitude'] = mb
                    self.event_cache[orid]['mtype'] = 'Mb'
                elif ms != '-':
                    self.event_cache[orid]['magnitude'] = ms
                    self.event_cache[orid]['mtype'] = 'Ms'
                elif ml != '-':
                    self.event_cache[orid]['magnitude'] = ml
                    self.event_cache[orid]['mtype'] = 'Ml'
                else:
                    self.event_cache[orid]['magnitude'] = '-'
                    self.event_cache[orid]['mtype'] = '-'

            try:
                db.close()
            except:
                pass

        self.logger.info("Events(): Done updating cache. (%s)" % len(
            self.event_cache))

        self.logger.debug('Events(): %s' % self.event_cache.keys())
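
For reference, every field in a finished cache entry is already a formatted
string. A hypothetical entry (all values invented):

# Hypothetical event_cache entry produced by the loop above (values invented):
event_cache = {
    66511: {
        'time': '1262477460.00', 'lat': '61.45', 'lon': '-150.73',
        'depth': '35.00', 'auth': 'AEIC', 'mb': '-', 'ms': '-', 'ml': '3.2',
        'nass': '14',
        # filled in by the preference chain Mb -> Ms -> Ml:
        'magnitude': '3.2', 'mtype': 'Ml',
    }
}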
Example #51
    def _get_list(self):
        try:
            db = datascope.dbopen(self.path, "r")
        except Exception as e:
            raise DbcentralException("Cannot open database %s (%s)" % (self.path, e))

        try:
            db = db.lookup("", "clusters", "", "")
        except datascope.DblookupFieldError:
            self.type = "masquerade"
            self.nickname = None
            self.dbs[self.path] = {"times": [-10000000000.0, 10000000000.0]}
            self.logger.info("Not a dbcentral database. Set single database.")
            return

        else:
            self.type = "dbcentral"
            if self.nickname is None:
                raise ValueError("Need nickname for Dbcentral clustername regex.")

        try:
            db = db.lookup("", "clusters", "", "dbNULL")
            null_time, null_endtime = db.getv("time", "endtime")
        except Exception as e:
            raise DbcentralException(
                "Cannot look up null values in clusters table. (%s)" % e
            )

        expr = "clustername =='%s'" % self.nickname

        try:
            db = db.subset(expr)
        except Exception as e:
            raise DbcentralException("Cannot subset on clustername. %s" % e)

        try:
            db = db.sort("time")
            nclusters = db.record_count
        except Exception as e:
            raise DbcentralException("Cannot sort on 'time' . %s" % e)

        if nclusters < 1:
            raise DbcentralException('No matches for nickname "%s".' % self.nickname)

        self.logger.debug("Records=%s" % nclusters)

        for i in range(nclusters):
            self.logger.debug("db.record=%s" % i)
            db.record = i

            try:
                dbname_template = db.extfile()[-1]
            except Exception as e:
                raise DbcentralException("Cannot run db.extfile(). %s" % e)

            self.logger.debug("dbname_template=%s" % dbname_template)

            try:
                volumes, net, time, endtime = db.getv(
                    "volumes", "net", "time", "endtime"
                )
            except Exception as e:
                raise DbcentralException(
                    "Problems with db.getv('volumes','net',"
                    + "'time','endtime'). (%s)\n" % e
                )

            self.logger.debug("volumes=%s" % volumes)
            self.logger.debug("net=%s" % net)
            self.logger.debug("time=%s" % time)
            self.logger.debug("endtime=%s" % endtime)

            if endtime == null_endtime:
                # This will be problematic with realtime systems
                endtime = stock.now()

            self.logger.debug("endtime=%s" % endtime)

            start_year = int(stock.epoch2str(time, "%Y"))
            end_year = int(stock.epoch2str(endtime, "%Y"))
            start_month = int(stock.epoch2str(time, "%L"))
            end_month = int(stock.epoch2str(endtime, "%L"))

            if volumes == "single":

                dbname = stock.epoch2str(time, dbname_template)
                self._test_db(time, endtime, dbname)

            elif volumes == "year":

                for y in range(start_year, end_year + 1):

                    voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
                    volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
                    dbname = stock.epoch2str(voltime, dbname_template)

                    self._test_db(voltime, volendtime, dbname)

            elif volumes == "month":

                vol_month = start_month
                vol_year = start_year
                vol_endmonth = end_month
                vol_endyear = end_year

                while vol_year < end_year or (
                    vol_year == end_year and vol_month <= end_month
                ):

                    voltime = stock.str2epoch("%d/1/%d" % (vol_month, vol_year))

                    if vol_month < 12:
                        vol_month = vol_month + 1
                    else:
                        vol_year = vol_year + 1
                        vol_month = 1

                    # end of this volume's month: start of the (already
                    # incremented) next month, minus one second
                    volendtime = (
                        stock.str2epoch("%d/1/%d" % (vol_month, vol_year)) - 1
                    )
                    dbname = stock.epoch2str(int(voltime), dbname_template)

                    self._test_db(voltime, volendtime, dbname)

            elif volumes == "day":

                start_day = int(stock.yearday(time))
                end_day = int(stock.yearday(endtime))

                vol_day = start_day

                while vol_day <= end_day:

                    voltime = stock.epoch(vol_day)
                    volendtime = voltime + 86399  # full day -1 sec
                    dbname = stock.epoch2str(voltime, dbname_template)

                    if self._test_db(voltime, volendtime, dbname):
                        self.dbs[dbname] = {"times": [time, endtime]}

                    vol_day = stock.yearday((stock.epoch(vol_day) + 86400))

            else:
                raise UnknownVolumeTypeException(volumes)

        self.logger.debug("DBS=%s" % self.dbs.keys())
Example #52
    def _set_tags(self):
        """
        TA array expands into multiple geographical regions.
        We need to add some quick identifier to the data blob.
        """
        for snet in self.cache:
            if not snet:
                continue
            for sta in self.cache[snet]:
                if not sta:
                    continue

                self.logging.debug("_set_tags(%s_%s)" % (snet, sta))

                if self._verify_cache(snet, sta, "tags"):
                    if not self.cache[snet][sta].get("tags"):
                        self.cache[snet][sta]["tags"] = []

                    # Tags for sites on the YUKON area
                    if (
                        self.cache[snet][sta]["lat"] > 50
                        and self.cache[snet][sta]["lon"] > -141
                        and self.cache[snet][sta]["lon"] < -120
                    ):
                        self.cache[snet][sta]["tags"].append("yukon")

                    # Tags for TA **ONLY**
                    if snet == "TA":
                        self.cache[snet][sta]["tags"].append("usarray")

                        if self.cache[snet][sta]["vnet"] == "_CASCADIA-TA":
                            self.cache[snet][sta]["tags"].append("cascadia")

                        if self.cache[snet][sta]["lat"] > 50:
                            self.cache[snet][sta]["tags"].append("alaska")
                        else:
                            self.cache[snet][sta]["tags"].append("low48")

                        # Need to identify active BGAN connections
                        bgantag = "non-bgan"
                        try:
                            for c in self.cache[snet][sta]["comm"]:
                                # active?
                                if c["endtime"] == "-":
                                    # BGAN?
                                    if c["commtype"] == "BGAN":
                                        # matched
                                        bgantag = "bgan"
                        except:
                            pass

                        # Add BGAN results
                        self.cache[snet][sta]["tags"].append(bgantag)

                    # Activity tag
                    if self.cache[snet][sta]["time"] == "-" or self.cache[snet][sta]["time"] > stock.now():
                        self.cache[snet][sta]["tags"].append("prelim")
                    elif self.cache[snet][sta]["endtime"] == "-" or self.cache[snet][sta]["endtime"] > stock.now():
                        self.cache[snet][sta]["tags"].append("active")
                    else:
                        self.cache[snet][sta]["tags"].append("decommissioned")

                    # Adoption tag
                    if "adoption" in self.cache[snet][sta]:
                        self.cache[snet][sta]["tags"].append("adopted")

                    # Certification tag
                    if "cert_time" in self.cache[snet][sta]:
                        if (
                            self.cache[snet][sta]["cert_time"] == "-"
                            or self.cache[snet][sta]["cert_time"] > stock.now()
                        ):
                            self.cache[snet][sta]["tags"].append("uncertified")
                        elif (
                            self.cache[snet][sta]["decert_time"] == "-"
                            or self.cache[snet][sta]["decert_time"] < stock.now()
                        ):
                            self.cache[snet][sta]["tags"].append("certified")
                        else:
                            self.cache[snet][sta]["tags"].append("decertified")
Example #53
            self.logger.debug( "dbname_template=%s" % dbname_template )

            try:
                volumes,net,time,endtime = db.getv("volumes","net","time","endtime")
            except Exception,e:
                raise DbcentralException("Problems with db.getv('volumes','net','time','endtime'). (%s)\n" % e)

            self.logger.debug( "volumes=%s" % volumes )
            self.logger.debug( "net=%s" % net )
            self.logger.debug( "time=%s" % time )
            self.logger.debug( "endtime=%s" % endtime )

            if endtime == null_endtime:
                # This will be problematic with realtime systems
                endtime = stock.now()

            self.logger.debug( "endtime=%s" % endtime )

            start_year  = int(stock.epoch2str(time,"%Y"))
            end_year    = int(stock.epoch2str(endtime,"%Y"))
            start_month = int(stock.epoch2str(time,"%L"))
            end_month   = int(stock.epoch2str(endtime,"%L"))

            if volumes == 'single':

                dbname = stock.epoch2str(time,dbname_template)
                self._test_db(time,endtime,dbname)

            elif volumes == 'year':

                for y in range(start_year,end_year+1):
Example #54
def main():
    input = configure()
    
    #################################
    # Get the database origin and error information
    #################################
    
    fields = ['lat', 'lon', 'depth', 'time', 'lddate', 'ndef', 'review', 'ml', 'sdobs', 'smajax', 'sminax', 'strike', 'sdepth', 'stime']
    originInfo = {}
    
    db = ds.dbopen(input.dbname, "r")
    dbor = db.lookup("", "origin", "", "" )
    dbor = ds.dbsubset(dbor, "evid==%s" % int(input.evid))
    dbev = db.lookup("", "event", "", "" )
    dbor = ds.dbjoin(dbor, dbev)
    dbor = ds.dbsubset(dbor, "orid==prefor")
    dberr = db.lookup("", "origerr", "", "")
    dbor = ds.dbjoin(dbor, dberr)

    if dbor.query('dbRECORD_COUNT') < 1:
        sys.stderr.write('error: Database view contains no records\n')
        sys.exit(1)  # originInfo would be empty below

    else:
        for i in range(dbor.query('dbRECORD_COUNT')):
            dbor[3] = i
            for f in fields:
                originInfo[f] = dbor.getv(f)[0]

    # Set if the origin has been reviewed or not
    
    if originInfo['review'] == 'y':
        originInfo['review'] = "manual"
    else:
        originInfo['review'] = "automatic"
    
    #################################
    # Begin writing the QuakeML document using the ElementTree package
    #################################
    
    # Populate the quakeml tag
    qml = ET.Element("q:quakeml")
    qml.set('xmlns', 'http://quakeml.org/xmlns/bed/1.2')
    qml.set('xmlns:catalog', 'http://anss.org/xmlns/catalog/0.1')
    qml.set('xmlns:q', 'http://quakeml.org/xmlns/quakeml/1.2')
    qml.set('xmlns:anss', 'http://anss.org/xmlns/event/0.1')
    
    # Populate the eventParameters tag
    eventParams = ET.SubElement(qml, "eventParameters")
    eventParams.set('publicID', 'quakeml:ak.anss.org/eventParameters/%s' % input.evid)
    
    # Populate the event tag
    event = ET.SubElement(eventParams, "event")
    event.set('publicID', 'quakeml:ak.anss.org/eventParameters/%s' % input.evid)
    event.set('catalog:datasource', 'ak')
    event.set('catalog:eventsource', 'ak')
    event.set('catalog:eventid', '%s' % input.evid)
    
    # Check if this is a delete QuakeML
    if input.print_delete:
        eventComment = ET.SubElement(event, "comment")
        eventText = ET.SubElement(eventComment, "text")
        eventText.text = "EVENT CANCELLED: "
        eventType = ET.SubElement(event, "type")
        eventType.text = "not existing"
        
        originInfo['lddate'] = stock.now()
    
    # Populate the magnitude tag
    if input.print_mag:
        
        magnitude = ET.SubElement(event, "magnitude")
        magnitude.set('publicID', 'quakeml:ak.anss.org/eventParameters/%s' % input.evid)
    
        mag = ET.SubElement(magnitude, "mag")
        magVal = ET.SubElement(mag, "value")
        magVal.text = "%s" % str(round(originInfo['ml'], 1))
    
        magType = ET.SubElement(magnitude, "type")
        magType.text = "Ml"
        
        magOrid = ET.SubElement(magnitude, "originID")
        magOrid.text = "quakeml:ak.anss.org/origin/%s" % input.evid
        
        magMethId = ET.SubElement(magnitude, "methodID")
        magMethId.text = "quakeml:anss.org/cube/magnitudeType/L"
        
        prefMagId = ET.SubElement(event, "preferredMagnitudeID")
        prefMagId.text = "quakeml:ak.anss.org/magnitude/%s/Ml" % input.evid
        
    # Populate the origin tag
    if input.print_origin:
        
        origin = ET.SubElement(event, "origin")
        origin.set('publicID', 'quakeml:ak.anss.org/eventParameters/%s' % input.evid)
        
        if input.print_arrival:
            dbassoc = db.lookup("","assoc","","")
            dbarr = db.lookup("","arrival","","")
            dbpicks = ds.dbjoin(dbor,dbassoc)
            dbpicks = ds.dbjoin(dbpicks, dbarr)
            
            arrFields = ['arid', 'sta', 'phase', 'delta', 'esaz', 'timeres', 'wgt', 'arrival.time', 'chan', 'arrival.lddate']
            arrInfo = {}
            
            for i in range(dbpicks.query('dbRECORD_COUNT')):
                dbpicks[3] = i
                for f in arrFields:
                    arrInfo[f] = dbpicks.getv(f)[0]
                
                # Write the arrival tag    
                arrival = ET.SubElement(origin, "arrival")
                arrival.set('publicID', 'quakeml:ak.anss.org/arrival/%s' % arrInfo['arid'])
                pickId = ET.SubElement(arrival, "pickID")
                pickId.text = "quakeml:ak.anss.org/arrival/%s" % arrInfo['arid']
                phase = ET.SubElement(arrival, "phase")
                phase.text = "%s" % arrInfo['phase']
                azimuth = ET.SubElement(arrival, "azimuth")
                azimuth.text = "%s" % arrInfo['esaz']
                distance = ET.SubElement(arrival, "distance")
                distance.text = "%s" % arrInfo['delta']
                timeRes = ET.SubElement(arrival, "timeResidual")
                timeRes.text = "%s" % arrInfo['timeres']
                timeWeight = ET.SubElement(arrival, "timeWeight")
                timeWeight.text = "%s" % arrInfo['wgt']
                arrCreationInfo = ET.SubElement(arrival, "creationInfo")
                arrAgencyId = ET.SubElement(arrCreationInfo, "agencyID")
                arrAgencyId.text = "ak"
                arrCreationTime = ET.SubElement(arrCreationInfo, "creationTime")
                arrCreationTime.text = "%s" % parse_time(arrInfo['arrival.lddate'])
                
                # Write the pick tag
                pick = ET.SubElement(event, "pick")
                pick.set('publicID', 'quakeml:ak.anss.org/arrival/%s' % arrInfo['arid'])
                pickTime = ET.SubElement(pick, "time")
                pickTimeVal = ET.SubElement(pickTime, "value")
                pickTimeVal.text = "%s" % parse_time(arrInfo['arrival.time'])
                pickWaveId = ET.SubElement(pick, "waveformID")
                pickWaveId.set('networkCode', "ak")
                pickWaveId.set('stationCode', "%s" % arrInfo['sta'])
                pickWaveId.set('channelCode', "%s" % arrInfo['chan'])
                pickWaveId.set('locationCode', "--")
                
                pickEvalMode = ET.SubElement(pick, "evaluationMode")
                pickEvalMode.text = "%s" % originInfo['review']
                
                pickCreationInfo = ET.SubElement(pick, "creationInfo")
                pickAgencyId = ET.SubElement(pickCreationInfo, "agencyID")
                pickAgencyId.text = "ak"
                pickCreationTime = ET.SubElement(pickCreationInfo, "creationTime")
                pickCreationTime.text = "%s" % parse_time(arrInfo['arrival.lddate'])
                
        
        origUncert = ET.SubElement(origin, "originUncertainty")
        minHorizUncert = ET.SubElement(origUncert, "minHorizontalUncertainty")
        minHorizUncert.text = "%s" % str(int(round((float(originInfo['sminax'])*1000), -2)))
        maxHorizUncert = ET.SubElement(origUncert, "maxHorizontalUncertainty")
        maxHorizUncert.text = "%s" % str(int(round((float(originInfo['smajax'])*1000), -2)))
        aziMaxHorizUncert = ET.SubElement(origUncert, "azimuthMaxHorizontalUncertainty")
        aziMaxHorizUncert.text = "%s" % originInfo['strike']
        prefDesc = ET.SubElement(origUncert, "preferredDescription")
        prefDesc.text = "uncertainty ellipse"
        
        origTime = ET.SubElement(origin, "time")
        origTimeVal = ET.SubElement(origTime, "value")
        origTimeVal.text = "%s" % parse_time(originInfo['time'])
        
        origLon = ET.SubElement(origin, "longitude")
        origLonVal = ET.SubElement(origLon, "value")
        origLonVal.text = "%s" % originInfo['lon']
        
        origLat = ET.SubElement(origin, "latitude")
        origLatVal = ET.SubElement(origLat, "value")
        origLatVal.text = "%s" % originInfo['lat']
        
        origDepth = ET.SubElement(origin, "depth")
        origDepthVal = ET.SubElement(origDepth, "value")
        origDepthVal.text = "%s" % str(int(round(float(originInfo['depth'])*1000, -2)))
        origDepthUncert = ET.SubElement(origDepth, "uncertainty")
        origDepthUncert.text = "%s" % str(int(round(float(originInfo['sdepth'])*1000, -2)))
        
        origMethId = ET.SubElement(origin, "methodID")
        origMethId.text = "quakeml:anss.org/cube/locationMethod/a"
        
        origQuality = ET.SubElement(origin, "quality")
        origNumPhase = ET.SubElement(origQuality, "usedPhaseCount")
        origNumPhase.text = "%s" % originInfo['ndef']
        origStdErr = ET.SubElement(origQuality, "standardError")
        origStdErr.text = "%s" % str(round(originInfo['sdobs'], 2))
        
        origEvalMode = ET.SubElement(origin, "evaluationMode")
        origEvalMode.text = "%s" % originInfo['review']
        
        prefOrigId = ET.SubElement(event, "preferredOriginID")
        prefOrigId.text = "quakeml:ak.anss.org/origin/%s" % input.evid
    
    
    # Populate various other QuakeML event tags
    eventCreationInfo = ET.SubElement(event, "creationInfo")
    agencyId = ET.SubElement(eventCreationInfo, "agencyID")
    agencyId.text = "ak"
    eventCreationTime = ET.SubElement(eventCreationInfo, "creationTime")
    eventCreationTime.text = "%s" % parse_time(originInfo['lddate'])
    eventVersion = ET.SubElement(eventCreationInfo, "version")
    eventVersion.text = "%s" % input.version
    
    # Creation Info tag
    evParamCreationInfo = ET.SubElement(eventParams, "creationInfo")
    evParamCreationTime = ET.SubElement(evParamCreationInfo, "creationTime")
    evParamCreationTime.text = "%s" % parse_time(originInfo['lddate'])
    
    pretty_quakeml = prettify(qml)

    db.close()
    print(pretty_quakeml)
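
prettify() and parse_time() are defined elsewhere in this script. Minimal
stand-ins consistent with how they are used here (assumptions, not the
originals):

# Hedged stand-ins for the helpers used above; not the script's originals.
import xml.etree.ElementTree as ET
from datetime import datetime, timezone
from xml.dom import minidom

def prettify(elem):
    """Return a pretty-printed XML string for an ElementTree element."""
    rough = ET.tostring(elem, 'utf-8')
    return minidom.parseString(rough).toprettyxml(indent='  ')

def parse_time(epoch):
    """Hypothetical: render an epoch as the ISO-8601 timestamp QuakeML expects."""
    return datetime.fromtimestamp(float(epoch), tz=timezone.utc).strftime(
        '%Y-%m-%dT%H:%M:%SZ')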
Example #55
    def _verify_cache(self):

        if (self.last_update + self.update_frequency ) < int( stock.now() ):
            self.logging.info( 'Need to update cache.' )
            self.update()
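
The guard only assumes two instance attributes, last_update and
update_frequency, both in epoch seconds. A minimal sketch of the surrounding
pattern (the class and the update() payload are invented):

# Hedged sketch of the lazy-refresh pattern; only the two attribute names
# come from the snippet above.
import time

class Cached(object):
    def __init__(self, update_frequency=300):
        self.update_frequency = update_frequency  # seconds between refreshes
        self.last_update = 0                      # epoch of last refresh
        self.cache = {}

    def update(self):
        self.cache = {"refreshed": int(time.time())}  # placeholder payload
        self.last_update = int(time.time())

    def lookup(self, key):
        # the same staleness check _verify_cache() makes, applied on read
        if (self.last_update + self.update_frequency) < int(time.time()):
            self.update()
        return self.cache.get(key)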
Example #56
                continue


            if not self.packet.valid:
                self.logging.warning( '*** SKIP - INVALID PACKET ***' )
                continue

            # Test connection. Reset if missing
            if not self._test_orb():
                self._connect_to_orb()

            self.logging.debug( 'Put new packet in orb: %s' %  self.packet.pkt )
            pkttype, pktbuf, srcname, time = self.packet.pkt.stuff()
            self.packet.orbid = self.orb['orb'].putx( srcname, self.packet.time, pktbuf )

        self.lastread = stock.now()

        #try:
        #    # REAP new packet from ORB
        #    self.packet.new(  self.orb['orb'].reap(self.reap_wait)  )

        #except orb.OrbIncompleteException, e:
        #    self.logging.debug("OrbIncompleteException orb.reap(%s)" % self.orbname)
        #    return True

        #except Exception,e:
        #    self.logging.warning("%s Exception in orb.reap(%s) [%s]" % (Exception,self.orbname,e))
        #    self.errors += 1
        #    return False

        #self.logging.debug("_extract_data(%s,%s)" % (self.packet.id, self.packet.time) )
Example #57
    def render_uri(self, request):
        """Handle a generic request."""

        #
        # Clean and prep vars
        #
        response_meta = {}

        response_meta.update({
            "error": "false",
            "setupEvents": self.config.event,
            "setupUI": "false",
            "realtime": self.config.realtime,
            # "proxy_url":  self.config.proxy_url,
            "style": self.config.style,
            "meta_query": "false",
        })

        # if self.config.proxy_url: response_meta['proxy'] = "'" + self.config.proxy_url + "'"

        #
        # remove all empty elements so that
        # (localhost:8008/stations/) is treated the same as (localhost:8008/stations)
        #
        path = request.prepath
        # print("Hello, world! I am located at %r." % (request.prepath))
        while True:
            try:
                path.remove(b"")
            except Exception:
                break

        # Parse all elements on the list
        query = self._parse_request(path)

        if b"precision" in request.args:
            query.update(
                {"precision": int(request.args[b"precision"][0].decode())})
        else:
            query.update({"precision": 1})

        if b"period" in request.args:
            query.update({"period": int(request.args[b"period"][0].decode())})
        else:
            query.update({"period": 0})

        if b"median" in request.args:
            test = request.args[b"median"][0].decode()
            if test.lower() in ("yes", "true", "t", "1"):
                query.update({"median": 1})
            else:
                query.update({"median": 0})
        else:
            query.update({"median": 0})

        if b"realtime" in request.args:
            test = request.args[b"realtime"][0].decode()
            if test.lower() in ("yes", "true", "t", "1"):
                query.update({"realtime": 1})
            else:
                query.update({"realtime": 0})
        else:
            query.update({"realtime": 0})

        if b"filter" in request.args:
            filter = request.args[b"filter"][0].decode()
            query.update({"filter": filter.replace("_", " ")})
        else:
            query.update({"filter": "None"})

        if b"calibrate" in request.args:
            test = request.args[b"calibrate"][0].decode()
            if test.lower() in ("yes", "true", "t", "1"):
                query.update({"calibrate": 1})
            else:
                query.update({"calibrate": 0})
        else:
            request.args.update({"calibrate": [self.config.apply_calib]})
            query.update({"calibrate": self.config.apply_calib})

        self.logger.debug(
            "QueryParser(): render_uri() request.prepath => path(%s)[%s]" %
            (len(path), path))
        self.logger.debug("QueryParser(): render_uri() query => [%s]" % query)

        if query["data"] is True:

            self.logger.debug('QueryParser(): render_uri() "data" query')

            if len(path) == 0:
                # ERROR: we need one option
                self.logger.error(
                    'QueryParser(): render_uri() ERROR: Empty "data" query!')
                return self.uri_results(request, "Invalid data query.")

            elif path[0] == b"events":
                """
                    Return events dictionary as JSON objects. For client ajax calls.
                    Called with or without argument.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => events")
                if self.config.event != "true":
                    return self.uri_results(request, {})

                elif len(path) == 2:
                    return self.uri_results(request,
                                            self.events(path[1].decode()))

                elif len(path) == 3:
                    return self.uri_results(
                        request,
                        self.events.phases(path[1].decode(), path[2].decode()))

                else:
                    return self.uri_results(request, self.events.table())

            elif path[0] == b"dates":
                """
                    Return list of yearday values for time in db
                    for all wfs in the cluster of dbs.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => dates")

                return self.uri_results(request, self.stations.dates())

            elif path[0] == b"stadates":
                """
                    Return list of yearday values for time in db
                    for all stations in the cluster of dbs.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => dates")

                if len(path) == 2:
                    return self.uri_results(
                        request, self.stations.stadates(path[1].decode()))

                if len(path) == 3:
                    return self.uri_results(
                        request,
                        self.stations.stadates(path[1].decode(),
                                               path[2].decode()),
                    )

                return self.uri_results(request, self.stations.stadates())

            elif path[0] == b"stations":
                """
                    Return station list as JSON objects. For client ajax calls.
                    Called with argument return dictionary
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => stations")

                if len(path) == 2:
                    return self.uri_results(request,
                                            self.stations(path[1].decode()))

                return self.uri_results(request, self.stations.list())

            elif path[0] == b"channels":
                """
                    Return channels list as JSON objects. For client ajax calls.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => channels")

                if len(path) == 2:
                    stas = self.stations.convert_sta(
                        path[1].decode().split("|"))
                    return self.uri_results(request,
                                            self.stations.channels(stas))

                return self.uri_results(request, self.stations.channels())

            elif path[0] == b"now":
                """
                    Return JSON object for epoch(now).
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => now")

                return self.uri_results(request, [stock.now()])

            elif path[0] == b"filters":
                """
                    Return list of filters as JSON objects. For client ajax calls.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri() query => data => filters %s" %
                    self.config.filters)

                return self.uri_results(request, self.config.filters)

            elif path[0] == b"wf":
                """
                    Return JSON object of data. For client ajax calls.
                    """

                self.logger.debug(
                    "QueryParser(): render_uri(): get_data(%s))" % query)

                return self.uri_results(request, self.get_data(query))

            elif path[0] == b"coverage":
                """
                    Return coverage tuples as JSON objects. For client ajax calls.
                    """
                self.logger.debug("QueryParser(): render_uri(): Get coverage")

                query.update({"coverage": 1})

                return self.uri_results(request, self.get_data(query))

            else:
                # ERROR: Unknown query type.
                return self.uri_results(request,
                                        "Unknown query type:(%s)" % path)

        response_meta.update(self.tvals)

        if not path:
            return self.uri_results(
                request, self.root_template.safe_substitute(response_meta))

        response_meta["meta_query"] = {}
        response_meta["meta_query"]["sta"] = query["sta"]
        response_meta["meta_query"]["chan"] = query["chan"]
        response_meta["meta_query"]["time_start"] = query["start"]
        response_meta["meta_query"]["time_end"] = query["end"]
        response_meta["meta_query"]["page"] = query["page"]

        if request.args:
            response_meta["setupUI"] = json.dumps(request.args)

        response_meta["meta_query"] = json.dumps(
            str(response_meta["meta_query"]))

        if path[0] == b"wf":
            return self.uri_results(
                request, self.root_template.safe_substitute(response_meta))

        elif path[0] == b"plot":
            return self.uri_results(
                request, self.plot_template.safe_substitute(response_meta))

        return self.uri_results(request, "Invalid query.")
Example #58
    def _get_event_cache(self):
        # private function to load the data from the tables

        self.logger.info("Events(): update cache")

        for dbname in self.dbcentral.list():

            self.logger.debug("Events(): dbname: %s" % dbname)

            # Get min max for wfdisc table first
            try:
                db = datascope.dbopen(dbname, "r")
                db = db.lookup(table="wfdisc")
                start = db.ex_eval("min(time)")
                end = db.ex_eval("max(endtime)")
                if end > stock.now():
                    end = stock.now()
                records = db.query(datascope.dbRECORD_COUNT)

            except Exception:
                records = 0

            if records:

                if not self.start:
                    self.start = start

                elif self.start > start:
                    self.start = start

                if not self.end:
                    self.end = end

                elif self.end < end:
                    self.end = end

            try:
                db.close()
            except Exception:
                pass

            try:
                db = datascope.dbopen(dbname, "r")
                db = db.lookup(table="event")
                records = db.query(datascope.dbRECORD_COUNT)

            except Exception:
                records = 0

            if records:

                try:
                    db = db.join("origin")
                    db = db.subset("orid == prefor")
                except Exception:
                    pass

            else:

                try:
                    db = db.lookup(table="origin")
                except Exception:
                    pass

            try:
                records = db.query(datascope.dbRECORD_COUNT)
            except Exception:
                records = 0

            if not records:
                self.logger.error("Events(): No records to work on any table")
                continue

            self.logger.debug(
                "Events(): origin db_pointer: [%s,%s,%s,%s]"
                % (db["database"], db["table"], db["field"], db["record"])
            )

            try:
                db = db.subset("time > %f" % self.start)
                db = db.subset("time < %f" % self.end)
            except Exception:
                pass

            try:
                records = db.query(datascope.dbRECORD_COUNT)
            except Exception:
                records = 0

            if not records:
                self.logger.error("Events(): No records after time subset")
                continue

            for i in range(records):

                db.record = i

                (orid, time, lat, lon, depth, auth, mb, ml, ms, nass) = db.getv(
                    "orid",
                    "time",
                    "lat",
                    "lon",
                    "depth",
                    "auth",
                    "mb",
                    "ml",
                    "ms",
                    "nass",
                )

                if auth == self.nulls("auth"):
                    auth = "-"

                if orid == self.nulls("orid"):
                    orid = "-"

                if time == self.nulls("time"):
                    time = "-"
                else:
                    time = "%0.2f" % time

                if lat == self.nulls("lat"):
                    lat = "-"
                else:
                    lat = "%0.2f" % lat

                if lon == self.nulls("lon"):
                    lon = "-"
                else:
                    lon = "%0.2f" % lon

                if depth == self.nulls("depth"):
                    depth = "-"
                else:
                    depth = "%0.2f" % depth

                if mb == self.nulls("mb"):
                    mb = "-"
                else:
                    mb = "%0.1f" % mb

                if ms == self.nulls("ms"):
                    ms = "-"
                else:
                    ms = "%0.1f" % ms

                if ml == self.nulls("ml"):
                    ml = "-"
                else:
                    ml = "%0.1f" % ml

                if nass == self.nulls("nass"):
                    nass = "-"
                else:
                    nass = "%d" % nass

                self.event_cache[orid] = {
                    "time": time,
                    "lat": lat,
                    "lon": lon,
                    "depth": depth,
                    "auth": auth,
                    "mb": mb,
                    "ms": ms,
                    "ml": ml,
                    "nass": nass,
                }

                # mb/ms/ml are formatted strings here; "-" marks a null value,
                # and comparing a str against 0 raises TypeError under Python 3
                if mb != "-":
                    self.event_cache[orid]["magnitude"] = mb
                    self.event_cache[orid]["mtype"] = "Mb"
                elif ms != "-":
                    self.event_cache[orid]["magnitude"] = ms
                    self.event_cache[orid]["mtype"] = "Ms"
                elif ml != "-":
                    self.event_cache[orid]["magnitude"] = ml
                    self.event_cache[orid]["mtype"] = "Ml"
                else:
                    self.event_cache[orid]["magnitude"] = "-"
                    self.event_cache[orid]["mtype"] = "-"

            try:
                db.close()
            except Exception:
                pass

        self.logger.info("Events(): Done updating cache. (%s)" % len(self.event_cache))

        self.logger.debug("Events(): %s" % self.event_cache.keys())
Example #59
    def _set_tags( self ):
        """
        TA array expands into multiple geographical regions.
        We need to add some quick identifier to the data blob.
        """
        for snet in self.cache:
            if not snet: continue
            for sta in self.cache[snet]:
                if not sta: continue

                self.logging.debug( "_set_tags(%s_%s)" % (snet,sta) )

                if self._verify_cache(snet,sta,'tags'):
                    if not self.cache[snet][sta].get('tags'):
                        self.cache[snet][sta]['tags'] = []

                    # Tags for sites on the YUKON area
                    if self.cache[snet][sta]['lat'] > 58.0 and \
                            self.cache[snet][sta]['lon'] > -147.0:
                        self.cache[snet][sta]['tags'].append( 'yukon' )

                    # Tags for TA **ONLY**
                    if snet == 'TA':
                        self.cache[snet][sta]['tags'].append( 'usarray' )

                        if self.cache[snet][sta]['vnet'] == '_CASCADIA-TA':
                            self.cache[snet][sta]['tags'].append( 'cascadia' )

                        if self.cache[snet][sta]['lat'] > 50:
                            self.cache[snet][sta]['tags'].append( 'alaska' )
                        else:
                            self.cache[snet][sta]['tags'].append( 'low48' )

                        # Need to identify active BGAN connections
                        bgantag = 'non-bgan'
                        try:
                            for c in self.cache[snet][sta]['comm']:
                                # active?
                                if c['endtime'] == '-':
                                    # BGAN?
                                    if c['commtype'] == 'BGAN':
                                        # matched
                                        bgantag = 'bgan'
                        except:
                            pass

                        # Add BGAN results
                        self.cache[snet][sta]['tags'].append( bgantag )

                    # Activity tag
                    if self.cache[snet][sta]['time'] == '-' or \
                            self.cache[snet][sta]['time'] > stock.now():
                        self.cache[snet][sta]['tags'].append( 'prelim' )
                    elif self.cache[snet][sta]['endtime'] == '-' or \
                            self.cache[snet][sta]['endtime'] > stock.now():
                        self.cache[snet][sta]['tags'].append( 'active' )
                    else:
                        self.cache[snet][sta]['tags'].append( 'decommissioned' )

                    # Adoption tag
                    if 'adoption' in self.cache[snet][sta]:
                        self.cache[snet][sta]['tags'].append( 'adopted' )

                    # Certification tag
                    if 'cert_time' in self.cache[snet][sta]:
                        if self.cache[snet][sta]['cert_time'] == '-' or \
                                self.cache[snet][sta]['cert_time'] > stock.now():
                            self.cache[snet][sta]['tags'].append( 'uncertified' )
                        elif self.cache[snet][sta]['decert_time'] == '-' or \
                                self.cache[snet][sta]['decert_time'] < stock.now():
                            self.cache[snet][sta]['tags'].append( 'certified' )
                        else:
                            self.cache[snet][sta]['tags'].append( 'decertified' )