Example #1
0
    def summarize(self):
        """
        Return summary of all QC issues for network.

        Behaviour:
        Create and return a summary of all QC issues pertaining to
        network.

        Return Values:
        <str> containing summary

        """
        import sys
        import os
        antelope_path = '%s/data/python' % os.environ['ANTELOPE']
        sys.path.append(antelope_path)
        from antelope.stock import epoch2str
        sys.path.remove(antelope_path)
        tfmt = '%m/%d/%Y %H:%M:%S'
        parts = ['QC Report for %s\n' % self.network]
        parts.append('%s - %s\n\n' % (epoch2str(self.tstart, tfmt),
                                      epoch2str(self.tend, tfmt)))
        if not self.qc_station_reports:
            parts.append('\n\nNo quality control issues.\n')
        for report in self.qc_station_reports:
            parts.append(report.summarize())
        return ''.join(parts)
Example #2
0
    def _get_dlevents(self):
        """Cache network performance rows from the netperf table.

        Reads every netperf row from each database in self.db, derives
        integer perf/time plus year and julian-day fields and a per-row
        id, and appends the row to self.cache.  Rows that fail parsing
        are appended to self.error_cache instead.
        """
        self.logging.debug( "_get_dlevents()")
        self.cache = []
        self.error_cache = []

        steps = [ 'dbopen netperf']

        fields = ['snet','time','npsta','perf']

        for db in self.db:
            for v in extract_from_db( db, steps, fields, self.db_subset):

                try:
                    v['perf'] = int( v['perf'] )
                    v['time'] = int( v['time'] )

                    v['year'] = int( stock.epoch2str(v['time'], '%Y', 'UTC') )
                    v['jday'] = int( stock.epoch2str(v['time'], '%j', 'UTC') )

                    v['id'] = '%s_%s_%s' % (v['snet'], v['jday'], v['year'])

                    self.logging.debug('netperf() => %s, %s)' % (v['id'], v['perf']) )

                    self.cache.append( v )
                # BUGFIX: "except Exception, e" is Python-2-only syntax and a
                # SyntaxError on Python 3; use "as".
                except Exception as e:
                    self.logging.complain('netperf() => parse exception)' )
                    # NOTE(review): this stores the Exception *class*, not the
                    # caught instance; kept as-is for downstream compatibility.
                    v['exception'] = Exception
                    v['error'] = e

                    self.logging.complain( v )

                    self.error_cache.append( v )
Example #3
0
    def summarize(self):
        """
        Return summary of all QC issues for network.

        Behaviour:
        Create and return a summary of all QC issues pertaining to
        network.

        Return Values:
        <str> containing summary

        """
        import sys
        import os

        pydir = "%s/data/python" % os.environ["ANTELOPE"]
        sys.path.append(pydir)
        from antelope.stock import epoch2str

        sys.path.remove(pydir)

        fmt = "%m/%d/%Y %H:%M:%S"
        pieces = [
            "QC Report for %s\n" % self.network,
            "%s - %s\n\n"
            % (epoch2str(self.tstart, fmt), epoch2str(self.tend, fmt)),
        ]
        if not self.qc_station_reports:
            pieces.append("\n\nNo quality control issues.\n")
        pieces.extend(r.summarize() for r in self.qc_station_reports)
        return "".join(pieces)
Example #4
0
    def _get_netperf(self):
        """Cache netperf rows, deriving year/jday fields and a row id."""
        self.logging.debug("_get_netperf()")
        self.cache = []
        self.error_cache = []

        steps = ["dbopen netperf"]
        fields = ["snet", "time", "npsta", "perf"]

        for db in self.db:
            for row in extract_from_db(db, steps, fields, self.db_subset):
                try:
                    row["perf"] = int(row["perf"])
                    row["time"] = int(row["time"])

                    row["year"] = int(stock.epoch2str(row["time"], "%Y", "UTC"))
                    row["jday"] = int(stock.epoch2str(row["time"], "%j", "UTC"))

                    row["id"] = "%s_%s_%s" % (row["snet"], row["jday"],
                                              row["year"])

                    self.logging.debug("netperf() => %s, %s)" %
                                       (row["id"], row["perf"]))
                except Exception as err:
                    # Malformed row: keep it, tagged with the failure.
                    self.logging.complain("netperf() => parse exception)")
                    row["exception"] = Exception
                    row["error"] = err
                    self.logging.complain(row)
                    self.error_cache.append(row)
                else:
                    self.cache.append(row)
Example #5
0
    def _get_detections(self):
        '''
        Open detection table and get all data for current event.

        Computes a time window spanning all arrivals (padded by each
        arrival's deltim uncertainty) across every origin of the event,
        then loads the detection rows inside that window, filtered by
        the configured state select/reject regexes.

        TODO: Deal with mopping up of detections not associated with events
        '''
        if not self.table_present['detection']:
            return

        # Seed the window so any real arrival time will shrink/grow it:
        # start begins at "now", end at 0.
        start = int(stock.now())
        end = 0

        self.logger.debug('Basing detection window on arrivals for evid [%d]'
                          % self.evid)
        for orid in self.origins.keys():
            arrivals = self.all_arrivals(orid=orid)
            for arrival in arrivals:

                # Missing arrival.time leaves the window start untouched
                # (time defaults to the current start value).
                if 'arrival.time' in arrival:
                    time = arrival['arrival.time']
                else:
                    time = start

                if 'arrival.deltim' in arrival:
                    deltim = arrival['arrival.deltim']
                else:
                    deltim = 0

                # Expand the window by the arrival uncertainty.
                if time - deltim < start:
                    start = time - deltim

                if time + deltim > end:
                    end = time + deltim

        if end > start:
            # [:-4] trims sub-second digits from the formatted times.
            self.logger.debug('Windowing detections from %s to %s'
                              % (stock.epoch2str(start, '%G %T')[:-4],
                                 stock.epoch2str(end, '%G %T')[:-4]))
            steps = ['dbopen detection']
            steps += ['dbsubset time>%s && time<%s' % (start, end)]
            steps += ['dbsubset state=~/%s/' % state
                      for state in self.detection_state_select]
            steps += ['dbsubset state!~/%s/' % state
                      for state in self.detection_state_reject]
            steps += self._seed_channel_steps()

            self.detections.get_view(steps)

        else:
            # No arrivals found (window never expanded past its seed).
            self.logger.warning(
                'Calculated time-window for detections is not valid: [%s,%s]' %
                (start, end))
Example #6
0
 def month_resolver(self, start_mth, start_yr, end_mth, end_yr, dbname_template):
     """Return per-month volume names from start to end month inclusive.

     Returns a dict mapping dbname -> (voltime, volendtime) when
     self.include_times is set, otherwise a plain list of dbnames.
     """
     if self.include_times:
         months = {}
     else:
         months = []
     vol_month = start_mth
     vol_year = start_yr
     # BUGFIX: the condition was "vol_year <= end_yr or (...)", which is
     # true for the entire end year regardless of month, so end_mth was
     # ignored and every month of the final year was emitted.
     while vol_year < end_yr or (vol_year == end_yr and vol_month <= end_mth):
         voltime = antstock.str2epoch("%d/1/%d" % (vol_month, vol_year))
         # Advance to the first day of the following month.
         if vol_month < 12:
             vol_month += 1
         else:
             vol_month = 1
             vol_year += 1
         # One second before the next month begins.
         volendtime = antstock.str2epoch("%d/1/%d" % (vol_month, vol_year)) - 1
         dbname = antstock.epoch2str(int(voltime), dbname_template)
         if self.include_times:
             months[dbname] = (voltime, volendtime)
         else:
             months.append(dbname)
     return months
Example #7
0
    def new( self, rawpkt ):
        """Reset this object and populate it from a raw orb packet.

        Packets whose id field is missing or < 1 are logged and ignored.
        self.valid / self.dls are set according to whether the decoded
        parameter file carries a 'dls' table.
        """
        if not rawpkt[0] or int(float(rawpkt[0])) < 1:
            self.logging.info( 'Bad Packet: %s %s %s' % (rawpkt[0], rawpkt[1], rawpkt[2] ) )
            return

        self._clean()

        self.rawpkt = rawpkt

        self.logging.debug( rawpkt )

        self.id = rawpkt[0]
        self.time = float( rawpkt[2] )
        self.strtime = stock.epoch2str( self.time, '%D %H:%M:%S %Z' ).strip()

        # Try to extract information from packet
        pkt = Pkt.Packet( rawpkt[1], rawpkt[2], rawpkt[3] )

        self.srcname = pkt.srcname if pkt.srcname else rawpkt[1]

        self.logging.info( '%s %s %s' % (self.id, self.time, self.strtime) )

        # BUGFIX: dict.has_key() was removed in Python 3; test membership
        # with "in" against .keys() (the explicit .keys() call steps around
        # a TypeError from the Antelope ParameterFile object).
        if 'dls' in pkt.pf.keys():
            self.dls = pkt.pf['dls']
            self.valid = True
            self.__str__()
        else:
            self.dls = {}
            self.valid = False
Example #8
0
    def send(self):
        """Send network report to appropriate e-mail addresses.

        Behaviour:
        Send network report to appropriate e-mail addresses via SMTP
        server

        """
        import sys
        import os

        pydir = "%s/data/python" % os.environ["ANTELOPE"]
        sys.path.append(pydir)
        from antelope.stock import epoch2str, now

        sys.path.remove(pydir)
        import smtplib

        sender = "auto_qc-noreply@%s" % self.smtp_server
        # Each extra recipient is rendered as ", <addr> " (trailing space
        # preserved from the historical format).
        recipients = "To: %s" % self.email[0]
        for extra in self.email[1:]:
            recipients = "%s, %s " % (recipients, extra)
        headers = "From: %s\n" % sender
        headers += "%s\n" % recipients
        headers += "Subject: auto_qc network report for %s %s\n" % (
            self.network,
            epoch2str(now(), "%m/%d/%Y"),
        )
        message = "%s%s" % (headers, self.summarize())
        try:
            conn = smtplib.SMTP(self.smtp_server)
            conn.sendmail(sender, self.email, message)
            print("Network summary successfully sent.")
        except smtplib.SMTPException:
            # Delivery failed: dump the report to stdout instead.
            print("Error: unable to send e-mail.\n\n")
            print(self.summarize())
Example #9
0
    def send(self):
        """Send network report to appropriate e-mail addresses.

        Behaviour:
        Send network report to appropriate e-mail addresses via SMTP
        server.  On SMTP failure the report is printed to stdout instead.

        """
        import sys
        import os
        sys.path.append('%s/data/python' % os.environ['ANTELOPE'])
        from antelope.stock import epoch2str, now
        sys.path.remove('%s/data/python' % os.environ['ANTELOPE'])
        import smtplib
        sender = 'auto_qc-noreply@%s' % self.smtp_server
        from_line = 'From: %s\n' % sender
        to_line = 'To: %s' % self.email[0]
        for rec in self.email[1:]:
            to_line = '%s, %s ' % (to_line, rec)
        to_line = '%s\n' % to_line
        subject_line = 'Subject: auto_qc network report for %s %s\n' % \
            (self.network, epoch2str(now(), '%m/%d/%Y'))
        message = '%s%s%s%s' % (from_line, to_line, subject_line,
            self.summarize())
        try:
            smtpObj = smtplib.SMTP(self.smtp_server)
            smtpObj.sendmail(sender, self.email, message)
            # BUGFIX: "print 'x'" statements are Python-2-only syntax and a
            # SyntaxError on Python 3; use the print() function.
            print('Network summary successfully sent.')
        except smtplib.SMTPException:
            print('Error: unable to send e-mail.\n\n')
            print(self.summarize())
Example #10
0
def readable_time(time, tformat="%D (%j) %H:%M:%S %z", tzone="UTC"):
    """Return *time* formatted for humans, or "-" when it cannot be.

    Values that parse_sta_time() flags as empty ("-") and any error
    raised by stock.epoch2str() both yield the "-" placeholder.
    """
    try:
        if parse_sta_time(time) == "-":
            # Jump straight to the fallback return below.
            raise ValueError("empty time value")
        return stock.epoch2str(time, tformat, tzone)
    # BUGFIX: "except Exception, e" is Python-2-only syntax (SyntaxError
    # on Python 3); the bound exception was never used anyway.
    except Exception:
        return "-"
Example #11
0
def readable_time(time, tformat="%D (%j) %H:%M:%S %z", tzone="UTC"):
    """Format epoch time in a human readable format.

    Returns "-" for empty/unparseable values instead of raising.
    """
    try:
        if parse_sta_time(time) != "-":
            return stock.epoch2str(time, tformat, tzone)
    except Exception:
        pass
    return "-"
Example #12
0
def get3Ctr(wf_db, sta, chan3, tstart, tend):
    """
    Input: db wfdisc pointer, station name, 3 channels, start,end
    Output: Obspy Stream with 3 waveform traces (segments), or None if
    any channel fails to load or the three traces differ in length.
    """
    logger = logging.getLogger('dbarrival_params')
    st3c = Stream()
    # BUGFIX: compare against None with "is", not "==".
    if chan3 is None:
        return None
    for chan in chan3:
        cline = "{}:{} {} - {}".format(sta, chan,
                                        stock.epoch2str(tstart,
                                                "%D %H:%M:%S.%s"),
                                        stock.epoch2str(tend,
                                                "%D %H:%M:%S.%s"))
        logger.debug("get3Ctr: Getting data for {}".format(cline))
        with ds.trdestroying(wf_db.trloadchan(tstart, tend, sta, chan)) as tr:
            if tr.record_count == 0:
                logger.warning("get3Ctr: Could not load data for {}".format(cline))
                return None
            tr.record = 0
            try:
                time, samprate = tr.getv('time', 'samprate')
            except DbgetvError:
                logger.warning("get3Ctr: Could not get value 'samprate' for {}".format(cline))
                return None
            data = []
            # Concatenate samples from every record (segment) of the trace.
            # BUGFIX: an unused tmp_data assignment previously called
            # segment.trdata() twice per segment; the redundant call is gone.
            for segment in tr.iter_record():
                data += list(segment.trdata())
            tr.trfree()
        data = array(data)
        stats = {'station': sta, 'channel': chan, 'sampling_rate': samprate}
        stats['starttime'] = UTCDateTime(time)
        otr = Trace(data=data, header=stats)
        st3c += otr
    ns = len(st3c[0].data)
    # TODO: write subroutine to check ns, if close (w/i a few samples) try again
    if len(st3c[1].data) != ns or len(st3c[2].data) != ns:
        logger.warning('get3Ctr: {0}'.format(cline))
        logger.warning('get3Ctr: Length of data arrays not equal {0} {1} {2}'.format(len(st3c[0].data),len(st3c[1].data),len(st3c[2].data)))
        return None
    return st3c
Example #13
0
    def _get_dlevents(self):
        """Cache all dlevent rows, splitting dlname into snet/sta parts."""

        self.logging.debug("_get_dlevents()")
        self.cache = []
        self.error_cache = []

        steps = ["dbopen dlevent"]
        fields = ["dlname", "dlevtype", "dlcomment", "time"]

        for row in extract_from_db(self.db, steps, fields, self.db_subset):
            self.logging.debug("dlevent(%s)" % (row["dlname"]))
            # dlname is "<snet>_<sta>"; split only on the first underscore.
            row["snet"], row["sta"] = row["dlname"].split("_", 1)
            row["year"] = stock.epoch2str(row["time"], "%Y", "UTC")
            row["month"] = stock.epoch2str(row["time"], "%L", "UTC")
            self.cache.append(row)
Example #14
0
    def _get_dlevents(self):
        """Populate self.cache with dlevent rows from the database."""

        self.logging.debug( "_get_dlevents()")
        self.cache = []
        self.error_cache = []

        steps = ['dbopen dlevent']
        fields = ['dlname', 'dlevtype', 'dlcomment', 'time']

        for entry in extract_from_db(self.db, steps, fields, self.db_subset):
            self.logging.debug('dlevent(%s)' % (entry['dlname']))
            # dlname encodes "<snet>_<sta>"; keep underscores in sta intact.
            net_sta = entry['dlname'].split('_', 1)
            entry['snet'] = net_sta[0]
            entry['sta'] = net_sta[1]
            entry['year'] = stock.epoch2str(entry['time'], '%Y', 'UTC')
            entry['month'] = stock.epoch2str(entry['time'], '%L', 'UTC')
            self.cache.append(entry)
 def __init__(self, db, evid): #Mal, this shouldn't take evid as an argument
     """Load one event's preferred origin and arrivals from a CSS3.0 db.

     Appends a single dict (id, hypocenter, magnitude, time components,
     and an 'arrivals' list) to this list-like object.
     """
     with closing(dbopen(db, 'r')) as db:
         view = db.schema_tables['origin']
         view = view.subset('evid == %s' % evid)
         # Keep only the preferred origin for this event.
         view = view.subset('orid == prefor')
         view = view.join('netmag', outer=True)
         # NOTE(review): getv is called without setting view.record first —
         # presumably it reads record 0 / the single remaining row; confirm.
         evid, time, lat, lon, depth, mag =  view.getv('evid', 'time', 'lat',
             'lon', 'depth', 'magnitude')
         year = int(epoch2str(time, '%Y'))
         month = int(epoch2str(time, '%m'))
         day = int(epoch2str(time, '%d'))
         hour = int(epoch2str(time, '%H'))
         minute = int(epoch2str(time, '%M'))
         # '%S.%s' keeps fractional seconds, hence float().
         second = float(epoch2str(time, '%S.%s'))
         self.append({'id': evid, 'year': year, 'month': month, 'day': day,
             'hour': hour, 'min': minute, 'sec': second, 'lat': lat,
             'lon': lon, 'depth': depth, 'mag': mag, 'arrivals': []})
         view = view.join('assoc')
         view = view.join('arrival')
         for record in view.iter_record():
             sta, arr_time, timeres, phase = record.getv('sta',
                 'arrival.time', 'timeres', 'phase')
             # Travel time relative to the origin time.
             ttime = arr_time - time
             self[-1]['arrivals'].append({'staname': sta, 'ttime': ttime,
                 'qual': timeres, 'phase': phase}) 
Example #16
0
    def new(self, rawpkt):
        """Re-populate this Poc object in place from a raw orb packet.

        Reusing one object is a deliberate workaround for the memory
        leaks seen when a fresh orb.Packet structure is allocated per
        packet and left for the garbage collector to reclaim.
        """

        self._clean()

        self.rawpkt = rawpkt

        # Reject packets without a positive id field.
        if not rawpkt[0] or int(float(rawpkt[0])) < 1:
            return

        self.id = rawpkt[0]
        self.time = rawpkt[2]

        # Decode the packet payload.
        pkt = Pkt.Packet(rawpkt[1], rawpkt[2], rawpkt[3])

        self.srcname = pkt.srcname if pkt.srcname else rawpkt[1]

        # Explicit .keys() steps around an Antelope ParameterFile
        # TypeError raised by direct membership tests.
        fields = pkt.pf.keys()

        if "sn" not in fields:
            return
        self.sn = pkt.pf["sn"]

        if "srcip" not in fields:
            return
        self.srcip = pkt.pf["srcip"]

        if "time" not in fields:
            return
        self.poctime = float(pkt.pf["time"])
        self.strtime = stock.epoch2str(self.poctime,
                                       "%D %H:%M:%S %Z").strip()

        self.valid = True

        # Optional extra data.
        self.pocc2 = pkt.pf["pocc2"] if "pocc2" in fields else {}
Example #17
0
    def __init__(
        self,
        width,
        height,
        result,
        reference,
        ref_sta,
        ref_chan,
        sta,
        start,
        end,
        result_dir,
        debug_plot,
        orid=None,
    ):
        """Initialize Plot class.

        Builds a 3x3 grid of subplots per result entry, delegates the
        drawing to self.plot_data(), then either shows the figure
        (debug_plot) or saves it as a PDF under result_dir, named from
        ref_sta/sta and either the start time or the orid.
        """
        total = len(result)
        self.width = width
        # Overall figure height scales with the number of result entries.
        self.height = height * total
        fig = plt.figure(figsize=(width, height))
        # One row of 3 axes per result, 3 rows per entry (3*3*total axes).
        axs = [
            fig.add_subplot(3 * total, 3, j)
            for j in range(1, (3 * 3 * total) + 1)
        ]

        plt.tight_layout()
        fig.subplots_adjust(top=0.9, bottom=0.05)

        self.plot_data(axs, result, reference, ref_sta, ref_chan, sta, start,
                       end)

        if debug_plot:
            # Interactive display instead of writing a file.
            plt.show()
        else:
            if not orid:
                # No orid given: name the file after the window start time.
                filename = "%s_%s_%s.pdf" % (
                    ref_sta,
                    sta,
                    epoch2str(start, "%Y%j_%H_%M_%S.%s"),
                )
            else:
                filename = "%s_%s_%s.pdf" % (ref_sta, sta, orid)

            path = "/".join([result_dir, filename])
            if not os.path.exists(result_dir):
                os.makedirs(result_dir)

            fig.savefig(path, bbox_inches="tight", pad_inches=0.5, dpi=100)
Example #18
0
    def new( self, rawpkt ):
        """Reset this object and populate it from a raw orb packet.

        The object is marked valid only when the decoded parameter file
        contains 'sn', 'srcip' and 'time' entries; otherwise the method
        returns early with the object left in its cleaned state.
        """
        self._clean()

        self.rawpkt = rawpkt

        if not rawpkt[0] or int(float(rawpkt[0])) < 1:
            return

        self.id = rawpkt[0]
        self.time = rawpkt[2]

        # Try to extract information from packet
        pkt = Pkt.Packet( rawpkt[1], rawpkt[2], rawpkt[3] )

        self.srcname = pkt.srcname if pkt.srcname else rawpkt[1]

        # BUGFIX: dict.has_key() was removed in Python 3; test membership
        # with "in" against .keys() (the explicit .keys() call steps around
        # a TypeError from the Antelope ParameterFile object).
        if 'sn' in pkt.pf.keys():
            self.sn = pkt.pf['sn']
        else:
            return

        if 'srcip' in pkt.pf.keys():
            self.srcip = pkt.pf['srcip']
        else:
            return

        if 'time' in pkt.pf.keys():
            self.poctime = float(pkt.pf['time'])
            self.strtime = stock.epoch2str( self.poctime, '%D %H:%M:%S %Z' ).strip()
        else:
            return

        self.valid = True

        # Maybe we have some extra data...
        if 'pocc2' in pkt.pf.keys():
            self.pocc2 = pkt.pf['pocc2']
        else:
            self.pocc2 = {}
Example #19
0
 def year_resolver(self, start_yr, end_yr, dbname_template):
     """Return one volume name per calendar year in [start_yr, end_yr].

     A dict of dbname -> (voltime, volendtime) when self.include_times
     is set, otherwise a plain list of dbnames.
     """
     years = {} if self.include_times else []
     for year in range(start_yr, end_yr + 1):
         voltime = antstock.str2epoch("1/1/%s 00:00:00" % year)
         volendtime = antstock.str2epoch("12/31/%s 23:59:59" % year)
         dbname = antstock.epoch2str(voltime, dbname_template)
         if self.include_times:
             years[dbname] = (voltime, volendtime)
         else:
             years.append(dbname)
     return years
Example #20
0
 def _init_from_db(self, db, evid):
     """
     Initialize Event object using a CSS3.0 database as input.

     Loads the event's preferred origin (falling back to the first
     origin when the prefor subset is empty), its netmag magnitude, and
     the associated arrivals as Phase objects.

     Raises an Exception when no evid is supplied.
     """
     # BUGFIX: identity comparison ("is None") instead of "== None".
     if evid is None:
         raise Exception('No \'evid\' supplied. Could not '
             'initialize Event object from CSS3.0 database.')
     with closing(dbopen(db, 'r')) as db:
         view = db.schema_tables['event']
         view = view.join('origin')
         view = view.subset('evid == %s' % evid)
         view = view.subset('orid == prefor')
         #If for some reason this subset is empty, just take the first
         #solution as preferred. EG. prefor field is unitialized.
         if view.record_count == 0:
             # BUGFIX: a dead "view = db.schema_tables['origin']" lookup
             # that was immediately overwritten has been removed.
             view = db.schema_tables['event']
             view = view.join('origin')
             view = view.subset('evid == %s' % evid)
         view = view.join('netmag', outer=True)
         view.record = 0
         evid, time, lat, lon, depth, mag, magtype =  view.getv('evid',
             'time', 'lat', 'lon', 'depth', 'magnitude', 'magtype')
         self.evid       = evid
         self.time       = time
         self.lat        = lat
         self.lon        = lon
         self.depth      = depth
         self.mag        = mag
         self.magtype    = magtype
         self.year       = int(epoch2str(time, '%Y'))
         self.month      = int(epoch2str(time, '%m'))
         self.day        = int(epoch2str(time, '%d'))
         self.hour       = int(epoch2str(time, '%H'))
         self.minute     = int(epoch2str(time, '%M'))
         # '%S.%s' keeps fractional seconds, hence float().
         self.second     = float(epoch2str(time, '%S.%s'))
         view = view.join('assoc')
         view = view.join('arrival')
         # Pad each (sta, time, phase) tuple with a None quality slot.
         arrivals = [ record.getv('sta',
                                  'arrival.time',
                                  'phase') \
                                  + (None, ) \
                                  for record in view.iter_record()
                    ]
         self.arrivals = [ Phase(sta, time, phase, qual)
                         for sta, time, phase, qual in arrivals
                         ]
Example #21
0
def get_chan3(dbsite_chan, sta, chan, time):
    """
    Return the appropriate 3 channel tuple for 3 component
    stations.

    Returns None (after logging a warning) unless exactly three
    matching channels are active at *time*.
    """
    logger = logging.getLogger('dbarrival_params')
    scsub = "sta =~ /{}/ && chan =~ /{}.{}/ "\
                    "&& ondate < _{}_ && (offdate > _{}_ || "\
                    "offdate == -1)".format(sta,
                                            chan[:2],
                                            chan[3:],
                                            time,
                                            time)
    logger.debug('get_chan3: scsub {}'.format(scsub))
    # BUGFIX: reuse the already-built scsub expression instead of
    # rebuilding an identical literal inline.
    view = dbsite_chan.subset(scsub)
    if view.record_count != 3:
        logger.warning("get_chan3: Could not determine appropriate chan3 for "\
                        "{}:{} at {}".format(sta, chan,
                            stock.epoch2str(time, "%Y%j %H:%M:%S.%s")))
        view.free()
        return None
    chan3 = []
    for record in view.iter_record():
        chan_name, ondate, offdate = record.getv('chan', 'ondate', 'offdate')
        chan3.append(chan_name)
    # BUGFIX: the view was previously leaked on the success path; the
    # unused max(ondates)/min(offdates) reduction has also been dropped.
    view.free()
    return tuple(sort_chans(chan3))
Example #22
0
 def day_resolver(self, start_dy, end_dy, dbname_template):
     """Return one volume name per day in [start_dy, end_dy].

     start_dy/end_dy are yearday values.  Returns a dict of
     dbname -> (voltime, volendtime) when self.include_times is set,
     otherwise a plain list of dbnames.
     """
     if self.include_times:
         days = {}
     else:
         days = []
     while start_dy <= end_dy:
         # BUGFIX: was "antstock.epoch(start_day)" -- 'start_day' is an
         # undefined name (NameError); the parameter is 'start_dy'.
         voltime = antstock.epoch(start_dy)
         volendtime = voltime + 86399 # one second less than a full day
         dbname = antstock.epoch2str(voltime, dbname_template)
         if self.include_times:
             days[dbname] = (voltime, volendtime)
         else:
             days.append(dbname)
         # Step to the next yearday.
         # NOTE(review): 'yearday' is used unqualified here while other
         # code uses antstock.yearday -- presumably imported at module
         # level; confirm.
         start_dy = yearday((antstock.epoch(start_dy) + 86400))
     return days
Example #23
0
    def _init_from_aux(self, time, lat, lon, depth, mag, magtype=None,
                       evid=None, phase_list=None):
        """
        Initialize Event object using an auxiliary read function.

        Auxiliary read function must parse flat file and pass parameters
        to Event constructor.

        BUGFIX: the 4th parameter was misspelled 'dpeth' while the body
        read 'depth' (a guaranteed NameError), and 'phase_list' was
        referenced without ever being defined; it is now an optional
        argument defaulting to an empty arrival list.
        """
        # NOTE(review): evid is accepted but never used; kept for
        # interface compatibility.
        self.time       = time
        self.lat        = lat
        self.lon        = lon
        self.depth      = depth
        self.mag        = mag
        self.magtype    = magtype
        self.year = int(epoch2str(time, '%Y'))
        self.month = int(epoch2str(time, '%m'))
        self.day = int(epoch2str(time, '%d'))
        self.hour = int(epoch2str(time, '%H'))
        self.minute = int(epoch2str(time, '%M'))
        # '%S.%s' keeps fractional seconds, hence float().
        self.second = float(epoch2str(time, '%S.%s'))
        self.arrivals = phase_list if phase_list is not None else []
Example #24
0
 def __init__(self, db,
              evid):  #Mal, this shouldn't take evid as an argument
     """Load one event's preferred origin and arrivals from a CSS3.0 db.

     Appends a single dict (id, hypocenter, magnitude, time components,
     and an 'arrivals' list) to this list-like object.
     """
     with closing(dbopen(db, 'r')) as db:
         view = db.schema_tables['origin']
         view = view.subset('evid == %s' % evid)
         # Keep only the preferred origin for this event.
         view = view.subset('orid == prefor')
         view = view.join('netmag', outer=True)
         # NOTE(review): getv is called without setting view.record first —
         # presumably it reads record 0 / the single remaining row; confirm.
         evid, time, lat, lon, depth, mag = view.getv(
             'evid', 'time', 'lat', 'lon', 'depth', 'magnitude')
         year = int(epoch2str(time, '%Y'))
         month = int(epoch2str(time, '%m'))
         day = int(epoch2str(time, '%d'))
         hour = int(epoch2str(time, '%H'))
         minute = int(epoch2str(time, '%M'))
         # '%S.%s' keeps fractional seconds, hence float().
         second = float(epoch2str(time, '%S.%s'))
         self.append({
             'id': evid,
             'year': year,
             'month': month,
             'day': day,
             'hour': hour,
             'min': minute,
             'sec': second,
             'lat': lat,
             'lon': lon,
             'depth': depth,
             'mag': mag,
             'arrivals': []
         })
         view = view.join('assoc')
         view = view.join('arrival')
         for record in view.iter_record():
             sta, arr_time, timeres, phase = record.getv(
                 'sta', 'arrival.time', 'timeres', 'phase')
             # Travel time relative to the origin time.
             ttime = arr_time - time
             self[-1]['arrivals'].append({
                 'staname': sta,
                 'ttime': ttime,
                 'qual': timeres,
                 'phase': phase
             })
Example #25
0
    def _get_list(self):
        """Build the self.dbs volume map from a dbcentral clusters table.

        Opens self.path; if no clusters table can be looked up, the path
        is treated as a plain database ("masquerade" mode) spanning all
        time.  Otherwise the clusters table is subset on self.nickname
        and each row is expanded into per-volume database names
        (single / year / month / day), each validated via self._test_db().

        Raises:
            DbcentralException: on database access or lookup problems.
            ValueError: in dbcentral mode when no nickname is configured.
            UnknownVolumeTypeException: for an unrecognized 'volumes'
                value.
        """
        try:
            db = datascope.dbopen(self.path, "r")
        except Exception as e:
            raise DbcentralException("Cannot open database %s (%s)" % (self.path, e))

        try:
            db = db.lookup("", "clusters", "", "")
        except datascope.DblookupFieldError:
            # Not a dbcentral database: expose the path itself as a single
            # database covering effectively all time.
            self.type = "masquerade"
            self.nickname = None
            self.dbs[self.path] = {"times": [-10000000000.0, 10000000000.0]}
            self.logger.info("Not a dbcentral database. Set single database.")
            return

        else:
            self.type = "dbcentral"
            if self.nickname is None:
                raise ValueError("Need nickname for Dbcentral clustername regex.")

        try:
            # dbNULL record supplies the schema's null time/endtime values.
            db = db.lookup("", "clusters", "", "dbNULL")
            null_time, null_endtime = db.getv("time", "endtime")
        except Exception as e:
            raise DbcentralException(
                "Cannot look up null values in clusters table. (%s)" % e
            )

        expr = "clustername =='%s'" % self.nickname

        try:
            db = db.subset(expr)
        except Exception as e:
            raise DbcentralException("Cannot subset on clustername. %s" % e)

        try:
            db = db.sort("time")
            nclusters = db.record_count
        except Exception as e:
            raise DbcentralException("Cannot sort on 'time' . %s" % e)

        if nclusters < 1:
            raise DbcentralException('No matches for nickname "%s".' % self.nickname)

        self.logger.debug("Records=%s" % nclusters)

        for i in range(nclusters):
            self.logger.debug("db.record=%s" % i)
            db.record = i

            try:
                # extfile() returns (dir, path); keep the resolved path.
                dbname_template = db.extfile()[-1]
            except Exception as e:
                raise DbcentralException("Cannot run db.extfile(). %s" % e)

            self.logger.debug("dbname_template=%s" % dbname_template)

            try:
                volumes, net, time, endtime = db.getv(
                    "volumes", "net", "time", "endtime"
                )
            except Exception as e:
                raise DbcentralException(
                    "Problems with db.getv('volumes','net',"
                    + "'time','endtime'). (%s)\n" % e
                )

            self.logger.debug("volumes=%s" % volumes)
            self.logger.debug("net=%s" % net)
            self.logger.debug("time=%s" % time)
            self.logger.debug("endtime=%s" % endtime)

            if endtime == null_endtime:
                # This will be problematic with realtime systems
                endtime = stock.now()

            self.logger.debug("endtime=%s" % endtime)

            start_year = int(stock.epoch2str(time, "%Y"))
            end_year = int(stock.epoch2str(endtime, "%Y"))
            start_month = int(stock.epoch2str(time, "%L"))
            end_month = int(stock.epoch2str(endtime, "%L"))

            if volumes == "single":

                # One database for the cluster's whole time range.
                dbname = stock.epoch2str(time, dbname_template)
                self._test_db(time, endtime, dbname)

            elif volumes == "year":

                for y in range(start_year, end_year + 1):

                    voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
                    volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
                    dbname = stock.epoch2str(voltime, dbname_template)

                    self._test_db(voltime, volendtime, dbname)

            elif volumes == "month":

                vol_month = start_month
                vol_year = start_year
                vol_endmonth = end_month
                vol_endyear = end_year

                while vol_year < end_year or (
                    vol_year == end_year and vol_month <= end_month
                ):

                    voltime = stock.str2epoch("%d/1/%d" % (vol_month, vol_year))

                    if vol_month < 12:
                        vol_month = vol_month + 1
                    else:
                        vol_year = vol_year + 1
                        vol_month = 1

                    # NOTE(review): vol_endmonth/vol_endyear never change
                    # inside this loop, so every month volume gets the same
                    # end time (end of the whole range) rather than the end
                    # of its own month — looks like a bug; confirm against
                    # _test_db's use of these times.
                    volendtime = (
                        stock.str2epoch("%d/1/%d" % (vol_endmonth, vol_endyear)) - 1
                    )
                    dbname = stock.epoch2str(int(voltime), dbname_template)

                    self._test_db(voltime, volendtime, dbname)

            elif volumes == "day":

                start_day = int(stock.yearday(time))
                end_day = int(stock.yearday(endtime))

                vol_day = start_day

                while vol_day <= end_day:

                    voltime = stock.epoch(vol_day)
                    volendtime = voltime + 86399  # full day -1 sec
                    dbname = stock.epoch2str(voltime, dbname_template)

                    # NOTE(review): only the "day" branch registers the
                    # volume in self.dbs here; other branches presumably
                    # rely on _test_db to do so — confirm.
                    if self._test_db(voltime, volendtime, dbname):
                        self.dbs[dbname] = {"times": [time, endtime]}

                    vol_day = stock.yearday((stock.epoch(vol_day) + 86400))

            else:
                raise UnknownVolumeTypeException(volumes)

        self.logger.debug("DBS=%s" % self.dbs.keys())
Example #26
0
def _load_station_traces(dbsta, sta, chans, tstart, tend, data, samplerate):
    """Load, calibrate, and cache trace data for a set of channels.

    For each channel code in *chans*, loads tstart..tend from the
    station-subset wfdisc view *dbsta*, applies calib, and — when the
    window actually contains records — stores the samples in data[chan]
    and the sample rate in samplerate[chan].  Channels with no data in
    the window are silently skipped (matching the original behaviour).
    """
    for chan in chans:
        tr = dbsta.trloadchan(tstart, tend, sta, chan)
        tr.trapply_calib()
        with ds.trfreeing(tr):
            if tr.query("dbRECORD_COUNT") > 0:
                tr.record = 0
                data[chan] = tr.trdata()
                samplerate[chan] = tr.getv("samprate")


def main():
    """Extract the last 10 minutes of waveform data per AK station and plot it.

    Builds the station list from the master affiliation/site database,
    then for each station loads broadband (BH.), moderate-rate strong
    motion (BN.) and high-rate strong motion (HN.) traces from today's
    day-archive database and hands them to plot_traces().
    """
    ##########
    # Extract station list from master_stations
    ##########
    stations = []
    with ds.closing(ds.dbopen("/aerun/sum/db/dbsum/dbsum", "r")) as db:
        steps = ["dbopen affiliation", "dbjoin site",
                 "dbsubset net=~/AK/ && offdate==NULL", "dbsort sta"]
        with ds.freeing(db.process(steps)) as dbview:
            for record in dbview.iter_record():
                stations.append(record.getv('sta')[0])

    ##########
    # Extract waveform data into trace objects
    ##########
    # Ten-minute window ending one minute before "now" (data latency margin).
    twin = 600
    tcurrent = float(stock.str2epoch('now'))
    tend = tcurrent - 60
    tstart = tend - twin

    # Day archives are named archive_YYYY_MM_DD after the window start.
    date = stock.epoch2str(tstart, '%Y_%m_%d')

    with ds.closing(ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")) as db:
        for sta in stations:
            data = {}
            samplerate = {}
            dbsta = db.lookup(table='wfdisc')
            dbsta = dbsta.subset('sta=~/%s/' % sta)

            # Broadband seismic data
            if dbsta.subset('chan=~/BH./').query('dbRECORD_COUNT') > 0:
                _load_station_traces(dbsta, sta, ('BHE', 'BHN', 'BHZ'),
                                     tstart, tend, data, samplerate)

            # Moderate sample rate strong motion data
            if dbsta.subset('chan=~/BN./').query('dbRECORD_COUNT') > 0:
                _load_station_traces(dbsta, sta, ('BNE', 'BNN', 'BNZ'),
                                     tstart, tend, data, samplerate)

            # High sample rate strong motion data
            if dbsta.subset('chan=~/HN./').query('dbRECORD_COUNT') > 0:
                _load_station_traces(dbsta, sta, ('HNE', 'HNN', 'HNZ'),
                                     tstart, tend, data, samplerate)

            print(sta)
            plot_traces(sta, data, tstart, tend, samplerate)
Example #27
0
    def new( self, rawpkt, name_type='pf/xi', select=False, reject=False, silent=False ):
        """Parse one raw packet dict and build an Antelope pf packet from it.

        Args:
            rawpkt: dict carrying at least '_id', 'srcType',
                'messageLogSeqNo', 'seqNo', 'timestamp' (a datetime),
                'q330Sn' and 'deviceIMEI'; may carry a 'valueMap' dict of
                channel readings.  NOTE(review): the '_id' key suggests a
                MongoDB document — confirm against the caller.
            name_type: suffix used to build the Antelope srcname as
                '<dlname>/<name_type>'.
            select: optional regex; packets whose srcType does NOT match
                are skipped.
            reject: optional regex; packets whose srcType matches are
                skipped.
            silent: when True, suppress most warning log output.

        Side effects: resets object state via self._clean(), then populates
        id/time/dlname/payload/pcktbuf and, on success, self.pkt and
        self.valid.  Always returns None; callers must check self.valid.
        """

        self.logging.debug( 'new packet' )

        # Packets without a document id are unusable — bail out early.
        if not rawpkt['_id'] :
            if not silent:
                self.logging.warning( 'Bad Packet: %s' % rawpkt )
            return

        # Reset per-packet state before repopulating it.
        self._clean()

        self.name_type = name_type

        self.rawpkt = self._convert_unicode( rawpkt )

        # Configuration-driven filtering on the packet's source type.
        if reject and re.search( reject, self.rawpkt['srcType'] ):
            self.logging.debug( 'type [%s] rejected by configuration' % self.rawpkt['srcType'] )
            return

        if select and not re.search( select, self.rawpkt['srcType'] ):
            self.logging.debug( 'type [%s] missed selection by configuration' % self.rawpkt['srcType'] )
            return

        self.logging.debug( self.rawpkt )

        # Track IDs
        self.logSeqNo = self.rawpkt['messageLogSeqNo']
        self.seqNo = self.rawpkt['seqNo']
        self.id = "%s.%s" % ( self.logSeqNo, self.seqNo )

        # Date object
        self.datetime = self.rawpkt[ 'timestamp' ]
        # Epoch string
        # Convert the naive datetime to a Unix epoch (assumes UTC input).
        self.time = (self.datetime-datetime.datetime(1970,1,1)).total_seconds()
        # Time string
        self.strtime = stock.epoch2str( self.time, '%D %H:%M:%S %Z', tz='UTC' ).strip()

        self.q330 = self.rawpkt['q330Sn']
        self.imei = self.rawpkt['deviceIMEI']
        self.src = self.rawpkt['srcType']
        self.srcname = self.src.lower()


        # Validate the IMEI<->Q330-serial pairing; fall back to the cached
        # serial for this IMEI when the packet's own serial is invalid.
        if not self.imei_buffer.add( imei=self.imei, serial=self.q330 ):
            self.logging.warning( 'Invalid Q330 serial [%s] for IMEI [%s]' % ( self.q330, self.imei ) )
            self.q330 = self.imei_buffer( self.imei )

            if not self.q330:
                if not silent:
                    self.logging.warning( 'UNKNOWN IMEI [%s]: SKIP DATA PACKET!!!' % self.imei )
                return
            else:
                if not silent:
                    self.logging.warning( 'USING CACHED Q330 SERIAL [%s] FOR IMEI [%s]' % ( self.q330, self.imei ) )

        # Map the Q330 serial to a datalogger name; the last matching
        # mapper wins if several match.
        for test in self.q330_serial_dlname:
            if test( self.q330 ):
                self.dlname = test( self.q330 )
                self.logging.debug( '%s => %s' % (self.q330, self.dlname) )

        if not self.dlname:
            if not silent:
                self.logging.warning( 'NO DLNAME FOR Q330 SERIAL: %s ' % self.q330 )
            return

        self.logging.debug( self.src )
        # NOTE(review): self.valueMap is assigned below, so this logs the
        # value left by _clean() (or a previous packet) — confirm intent.
        self.logging.debug( self.valueMap )

        # Verify if we have data pairs
        if 'valueMap' in self.rawpkt:
            self.valueMap = self.rawpkt['valueMap']

            # Extract each value to a new key:value on the dict
            for chan in self.valueMap:
                if chan in self.channel_mapping:
                    # A falsy mapping means "drop this channel".
                    if not self.channel_mapping[chan]: continue

                    self.payload[ self.channel_mapping[chan] ] = self.valueMap[chan]

                    self.logging.debug( '%s -> %s:%s' % (chan, self.channel_mapping[chan], self.valueMap[chan]) )
                else:
                    self.logging.warning( '[%s] NOT DEFINED IN PF FILE' % chan )

            # Normalize the quality counters by the sample count, when present.
            for test in [ 'xil1q', 'xil2q']:
                if test in self.payload and 'xisamp' in self.payload:
                    try:
                        self.payload[ test ] = float(self.payload[ test ]) / self.payload[ 'xisamp' ]
                    except:
                        pass

            self.pcktbuf = {
                    'dls': { self.dlname: self.payload },
                    'q330' : self.q330,
                    'imei' : self.imei,
                    'src' : self.src,
                    'srcname' : self.srcname
                }


        self.logging.debug( self.payload )


        # Try to build packet from info
        if self.name_type and self.time and self.payload:

            self.pkt.srcname = Pkt.SrcName( '%s/%s' % ( self.dlname,self.name_type ) )
            self.pkt.time = self.time

            #self.logging.debug( self.pkt.type )
            #self.logging.debug( self.pkt.srcname )

            # Extract pf structure, update it and return it.
            # NOTE(review): if this packet had no 'valueMap', self.pcktbuf
            # here is whatever _clean() left — confirm that is intended.
            temp = self.pkt.pf
            temp.update( self.pcktbuf )
            self.pkt.pf = temp

            self.logging.debug( self.pkt.type )
            self.logging.debug( self.pkt.srcname )

            self.logging.debug( 'Pkt( %s, %s) => {%s}' % (self.pkt.srcname, self.pkt.time, self.pkt.pf.pf2string().replace('\n',', ').replace('\t',':') ) )

            self.valid = True

        else:
            self.logging.warning( 'NO VALUABLE INFORMATION IN PACKET. dlname:%s  time:%s' % (self.dlname, self.time ) )
            return

        self.logging.info( str(self) )
Example #28
0
    def plot_data(self, axs, result, reference, ref_sta, ref_chan, sta, start,
                  end):
        """Plot original and rotated traces against the reference station.

        For each channel code in *result* this fills one 3x3 block of the
        flat *axs* grid: row 0-2 overlays the reference trace with the
        station's original trace, row 3-5 overlays it with the rotated
        trace, and row 6-8 holds a text panel with the rotation angle and
        cross-correlation value.

        Args:
            axs: flat sequence of matplotlib Axes, 9 per channel-code group.
            result: mapping code -> chan -> object exposing .original,
                .rotated, .azimuth and .xcorr.
            reference: mapping chan -> reference trace samples.
            ref_sta, ref_chan: reference station/channel, used in labels.
            sta: station being compared, used in labels.
            start, end: epoch time window of the traces (seconds).
        """
        k = 0
        for code in result:
            for i, chan in enumerate(result[code]):
                data = result[code][chan]

                # Column index within this group's 3x3 block.  Only
                # i in {0, 1, 2} is expected; any further channel would
                # reuse the previous ind (original behaviour preserved).
                if i == 0:
                    ind = 0 + k
                if i == 1:
                    ind = 1 + k
                if i == 2:
                    ind = 2 + k

                axs[ind].plot(reference[chan],
                              "b",
                              label="%s_%s%s" % (ref_sta, ref_chan, chan))
                axs[ind].plot(data.original,
                              "r",
                              label="%s_%s%s" % (sta, code, chan))
                axs[ind + 3].plot(reference[chan], "b")
                axs[ind + 3].plot(data.rotated, "r")

                axs[ind].legend(loc="upper left", prop={"size": 6})

                # Text panel: hide the axes frame, show only the annotation.
                axs[ind + 6].xaxis.set_visible(False)
                axs[ind + 6].yaxis.set_visible(False)
                axs[ind + 6].patch.set_alpha(0.0)
                axs[ind + 6].axis("off")

                text = "Angle: %s\n" % data.azimuth
                text += "Xcorr: %s\n" % round(data.xcorr, 3)

                axs[ind + 6].annotate(
                    six.ensure_text(text, "utf-8"),
                    (0.5, 0.7),
                    xycoords="axes fraction",
                    va="top",
                    ha="center",
                    fontsize=6,
                    bbox=dict(edgecolor="white",
                              boxstyle="round, pad=0.5",
                              fc="w"),
                    size=12,
                )

                # y-axis labels
                if i == 0:
                    axs[ind].set_ylabel("original", fontsize=12)
                    axs[ind + 3].set_ylabel("rotated", fontsize=12)

                axs[ind].set_yticks([])
                axs[ind + 3].set_yticks([])

                axs[ind].set_xticks([])
                axs[ind + 3].set_xticks([])

                # xticks and xtick labels: seconds relative to the predicted
                # first arrival.  BUG FIX: removed a dead list comprehension
                # that built absolute-time labels via epoch2str and was then
                # immediately overwritten by the numeric labels below.
                tw = end - start
                dt = tw / len(reference[chan])
                xticks = numpy.arange(0, len(reference[chan]),
                                      len(reference[chan]) / 4)
                # NOTE(review): the constant 2 looks like a 2 s pre-arrival
                # offset baked into the extraction window — confirm.
                xtick_labels = xticks * dt - 2
                axs[ind + 3].set_xticks(xticks)
                axs[ind + 3].set_xticklabels(xtick_labels)
                axs[ind +
                    3].set_xlabel("time since predicated first-arrival (s)")

                if i == 1:
                    axs[ind].set_title(
                        "%s_%s compared to %s_%s" %
                        (ref_sta, ref_chan, sta, code),
                        fontsize=12,
                    )
            k += 9
Example #29
0
                            ])
        #phsub = "iphase=='P'"
        #dbv = dbview.subset(phsub)
        dbv = dbview.subset(asub)
        #dbv2 = dbview.subset(phsub)
        #dbv = dbv2.subset("sta=~/EC14/")
        logger.info('{0} records in assoc-arrival join after subset {1}'.format(dbv.record_count, asub))
        tj0 = time.time()
        for j in range(dbv.record_count):
            dbv.record = j
            (atime,sta,chan,iphs,phs,dist_deg,deltim,arid) = dbv.getv('arrival.time','sta',
                                                        'chan','iphase','assoc.phase',
                                                        'delta','deltim','arid')
            # TODO: check if -999.0, calculate if needed
            (seaz,esaz) = dbv.getv('seaz','esaz')
            pick_str = '{0} {1} {2} {3} '.format(sta, chan, iphs, stock.epoch2str(atime, "%D %H:%M:%S.%s"))
            logger.debug('Input pick: {0} {1} {2} {3} '.format(sta, chan, iphs, stock.epoch2str(atime, "%D %H:%M:%S.%s")))

            chan3 = get_chan3(dbsc, sta, chan, atime)
            if chan3 is None:
                logger.warning('Could not determine chan3 for {0}, skipping.'.format(pick_str))
                continue
            tstart = atime - pretime
            tend = atime + posttime
            # For some reason, doing this subset before calling
            # get3Ctr can SOMETIMES drastically speeds things up
            if args.presub:
                wsub = 'sta=~/{0}/'.format(sta)
                dbwfs = dbwf.subset(wsub)
                st3c = get3Ctr(dbwfs, sta, chan3, tstart, tend)
            else:
Example #30
0
def parse_time(epoch_time):
    """Format an epoch time using Antelope's '%GT%TZ' (date-T-time-Z) layout."""
    fmt = '%GT%TZ'
    return stock.epoch2str(epoch_time, fmt)
Example #31
0
def run_gmt(stations, chans, db, i, filter, tstep, tpad, gmt_params, verbose):
    """Render one 3-D GMT frame for the time window starting at epoch i.

    Draws the Alaska basemap and coastlines, marks the hypocenter, then for
    each station computes the window's PGA and plots it as a 3-D column.
    The frame is written to ~/Development/scratch/realtime_gmv/regions_<i>.ps.

    Arguments:
    stations -- list of station codes
    chans -- channel code per station, parallel to stations
    db -- open waveform (Datascope) database
    i -- epoch start time of this frame; also keys the output filename
    filter -- Antelope filter string applied to each trace
    tstep -- frame length in seconds
    tpad -- seconds of extra data on each side for filter-transient removal
    gmt_params -- GMT settings dict (unused by the 3-D rendering path)
    verbose -- when True, print diagnostics for missing or gappy data
    """
    ########################
    # Create the basemap plot for the given time window (3-D view)
    ########################
    os.system("grdview /Users/mgardine/Development/aec_public/GMT/alaska_3m.grd -C/Users/mgardine/Development/aec_public/GMT/gray_blue.cpt -JCyl_stere/-153.5/61/6 -I/Users/mgardine/Development/aec_public/GMT/alaska_3m_int.grd -R-180/-127/50/72 -Ba300f300g300/a300f300g300wsne:.\"\": -E200/40 -Qi -P -X0.5 -Y3 -K > ~/Development/scratch/realtime_gmv/regions_%s.ps" % i )
    os.system("pscoast -R -J -B -K -Di -N1/black -Wblack -A100/0/1 -E200/40 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % i)

    # Hypocenter marker (red star) for the event being animated.
    os.system("echo -153.15 59.91 0 | psxyz -R-180/-127/50/72/0/1 -J -Sa0.2 -E200/40 -W7,red -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" %i )

    j = 0
    for sta in stations:
        dbsta = db.lookup("","site","","")
        dbsta = dbsta.subset("sta=~/%s/ && offdate==NULL" % sta)

        ########################
        # Check to ensure that station records exist
        ########################
        if dbsta.query("dbRECORD_COUNT") < 1:
            print("Failed to load metadata for station %s, skipping " % sta)
            j += 1
            continue

        ########################
        # Get lat, lon of station and extract waveform data for time window
        # (plus time pad for removal of filter transients)
        ########################
        dbsta[3] = 0
        lat = dbsta.getv('lat')
        lon = dbsta.getv('lon')
        tr = db.loadchan(i - tpad, i + tstep + tpad, "%s" % sta, "%s" % chans[j])

        ########################
        # Check to ensure that waveform data is complete
        ########################
        if tr.query("dbRECORD_COUNT") < 1:
            if verbose:
                failed_time = stock.epoch2str(i, '%D %H:%M:%S')
                print("Failed to load data from %s:%s at %s" % (sta, chans[j], failed_time))
            tr.trdestroy()
            j += 1
            continue

        ########################
        # Apply instrument calibrations and filter
        ########################
        tr.apply_calib()
        tr.filter("%s" % filter)
        tr[3] = 0
        v = tr.data()

        ########################
        # Calculate PGA in ug.  BUG FIX: the original loop ranged over
        # 0..len(v)-2 and silently dropped the final sample.
        # NOTE(review): the time pad is NOT trimmed before taking the max
        # (the samprate-based trim was commented out in the original as
        # uncertain) — confirm whether filter transients should be excluded.
        ########################
        samprate = tr.getv("samprate")
        w = [abs(float(sample)) for sample in v]

        if w:
            pga = max(w)/9810
            # 3-D PGA column at the station location.
            os.system("echo %s %s %s | psxyz -R-180/-127/50/72/1/5000 -J -Jz3l -E200/40 -So0.1 -Gred -W0.1 -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (lon[0], lat[0], pga, i) )
        else:
            if verbose:
                failed_time = stock.epoch2str(i, '%D %H:%M:%S')
                print("Station %s:%s has gap at time %s" % (sta, chans[j], failed_time))

        tr.trdestroy()
        j += 1
Example #32
0
    def new(self, rawpkt):
        """Create a new Packet object.

        This works a little differently than most python objects because the
        underlying Antelope C code is leaky. The recommended way to manipulate
        packets is to create a packet buffer in memory and continually
        write/rewrite to that, rather than reallocate new memory from the heap.

        Caveats:
            This whole class is not at all Pythonic, and really a gigantic hack.
        """

        # rawpkt is a sequence (presumably from an orb reap): [0]=packet id,
        # [1]=source name, [2]=epoch time, [3]=raw payload.
        # A missing or non-positive id means the packet is unusable.
        if not rawpkt[0] or int(float(rawpkt[0])) < 1:
            self.logger.info("Bad Packet: %s %s %s" %
                             (rawpkt[0], rawpkt[1], rawpkt[2]))
            return

        # Reset per-packet state before repopulating (see class caveats).
        self._clean()

        self.rawpkt = rawpkt

        self.logger.debug(rawpkt)

        self.id = rawpkt[0]
        self.time = float(rawpkt[2])
        # Human-readable timestamp for logging.
        self.strtime = stock.epoch2str(self.time, "%D %H:%M:%S %Z").strip()

        # Try to extract information from packet
        pkt = Pkt.Packet(rawpkt[1], rawpkt[2], rawpkt[3])

        # Fall back to the raw source name if decoding did not yield one.
        self.srcname = pkt.srcname if pkt.srcname else rawpkt[1]

        self.logger.info("%s %s %s" % (self.id, self.time, self.strtime))
        # self.logger.debug( pkt.pf )

        # Antelope 5.7 stock.ParameterFile.__getitem__ doesn't like the "foo in
        # bar" format.
        # Just try retrieving the value and catch whatever exception we get.
        # Antelope throws warnings if the key isn't found. We don't care.
        # https://stackoverflow.com/questions/14463277/how-to-disable-python-warnings
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            try:
                # A 'dls' entry marks a usable status packet.
                self.dls = pkt.pf["dls"]
                self.valid = True
            except (KeyError, TypeError):
                self.dls = {}
                self.valid = False

            if self.valid:
                # Optional metadata: IMEI and Q330 serial, when present.
                try:
                    self.imei = pkt.pf["imei"]
                    self.logger.info("Found imei: %s" % (pkt.pf["imei"]))
                except KeyError:
                    pass

                try:
                    self.q330 = pkt.pf["q330"]
                    self.logger.info("Found q330: %s" % (pkt.pf["q330"]))
                except KeyError:
                    pass
Example #33
0
def main():
    """Drive the realtime 3-D ground-motion-visualization animation.

    Reads run parameters from the command line and a parameter file, steps
    through the requested time window in tstep increments, renders one GMT
    frame per step via run_gmt(), and finally assembles the frames into an
    animated GIF.
    """
    ########################
    # Get information from execution flags and parameter file
    ########################
    (input, pfname) = configure()
    (tstep, tpad, filter, sta, chan, animation_params, gmt_params) = get_pf(pfname)

    ########################
    # Set start and end time for animation
    ########################
    tstart = float(stock.str2epoch(input.ts))

    # BUG FIX: was `input.ts is "now"` — identity comparison on strings is
    # unreliable (depends on interning); use equality.
    if input.ts == "now":
        # Snap back to the previous whole window so frames cover a full span.
        tstart = tstart - (tstart % (input.twin)) - input.twin

    tend = tstart + float(input.twin)

    ########################
    # Open waveform database: default to the day archive for tstart
    ########################
    if not input.dbname:
        date = stock.epoch2str(tstart, '%Y_%m_%d')
        db = ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")
    else:
        db = ds.dbopen(input.dbname, "r")

    ########################
    # Setup format for creating timestamp on GMT plots
    ########################
    f = '%D %H:%M:%S %Z'
    timestamp = stock.epoch2str(tstart, f, "")
    pstextstring = "-153 53 10 0 1 0 %s" % timestamp

    current_time = tstart
    while current_time < tend:

        ########################
        # Get waveform data, compute PGA, and make GMT plot
        ########################
        run_gmt(sta, chan, db, current_time, filter, tstep, tpad, gmt_params, input.verbose)

        ########################
        # Refresh the on-frame timestamp every 10 seconds; every frame gets
        # the current pstextstring stamped on it.  (The original if/else ran
        # the identical pstext command in both branches — deduplicated.)
        ########################
        if (current_time % 10 == 0):
            timestamp = stock.epoch2str(current_time, f, "")
            pstextstring = "-160 51 10 0 1 0 %s" % timestamp
            print("Processing time %s" % timestamp)
        os.system("echo %s | pstext -R -J -E200/40 -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring,current_time))

        current_time += tstep

    ########################
    # Make animated .GIF from the per-frame PostScript files
    ########################
    print("Making animation")
    os.system("/opt/local/bin/gm convert -density 200% -delay 10 -loop 0 -resize 1280x1280 +map ~/Development/scratch/realtime_gmv/regions*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    os.system("convert ~/Development/scratch/realtime_gmv/animation.gif -layers OptimizeTransparency -crop 720x720+163+300 +repage ~/Development/scratch/realtime_gmv/animation1.gif")

    db.close()
Example #34
0
def run_gmt(stations, chans, db, i, filter, tstep, tpad, verbose):
    """Draw one 2-D GMT frame for the window starting at epoch i.

    Renders the Alaska basemap with coastlines, then overlays each
    station's peak ground acceleration (computed over the window, with
    one pad's worth of samples trimmed from each side) as a colored
    symbol.  Output goes to ~/Development/scratch/realtime_gmv/regions_<i>.ps.
    """
    # Basemap and coastlines for this frame.
    os.system("grdimage -R-170/-130/52/72 -Jb-150/52/62/72/.25 /Users/mgardine/Development/GMT/alaska_5m.grd -I/Users/mgardine/Development/GMT/alaska_5m_int.grd -C/Users/mgardine/Development/GMT/gray_blue.cpt -Ba300f300/a300f300wsne:.\"\": -X1.5 -Y3 -P -K > ~/Development/scratch/realtime_gmv/regions_%s.ps" %i)
    os.system("pscoast -R -Jb -B -K -Di -N1/black -Wblack -A100/0/1 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % i)

    time_fmt = '%D %H:%M:%S'
    for idx, sta in enumerate(stations):
        # Active site record for this station.
        site = db.lookup("","site","","")
        site = site.subset("sta=~/%s/ && offdate==NULL" % sta)

        # Skip stations with no current metadata.
        if site.query("dbRECORD_COUNT") < 1:
            print("Failed to load metadata for station %s, skipping " % sta)
            continue

        # Station coordinates, then the padded waveform window
        # (pad absorbs filter transients on both sides).
        site[3] = 0
        lat = site.getv('lat')
        lon = site.getv('lon')
        tr = db.loadchan(i-tpad, i+tstep+tpad, "%s" % sta, "%s" % chans[idx])

        # Skip stations with no waveform data in the window.
        if tr.query("dbRECORD_COUNT") < 1:
            if verbose:
                print("Failed to load data from %s:%s at %s" % (sta, chans[idx], stock.epoch2str(i, time_fmt)))
            tr.trdestroy()
            continue

        # Calibrate and filter the trace, then pull the samples.
        tr.apply_calib()
        tr.filter("%s" % filter)
        tr[3] = 0
        v = tr.data()

        # PGA (ug), excluding one pad's worth of samples on each side.
        pad = int(tr.getv("samprate")[0])
        w = [abs(float(v[k])) for k in range(pad, len(v) - pad)]

        if w:
            pga = max(w)/9810
            os.system("echo %s %s %s | psxy -R -Jb -Cwhitered1.cpt -Sc0.1 -W0.1 -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (lon[0], lat[0], pga, i))
        else:
            if verbose:
                print("Station %s:%s has gap at time %s" % (sta, chans[idx], stock.epoch2str(i, time_fmt)))

        tr.trdestroy()
Example #35
0
def main():
    """Drive the realtime 2-D ground-motion-visualization animation.

    Reads run parameters from the command line and a parameter file, steps
    through the requested time window in tstep increments, renders one GMT
    frame per step via run_gmt(), then assembles the frames into an animated
    GIF and publishes it to the web directory.
    """
    ########################
    # Get information from execution flags and parameter file
    ########################
    (input, pfname) = configure()
    (tstep, tpad, filter, sta, chan) = get_pf(pfname)

    ########################
    # Set start and end time for animation
    ########################
    tstart = float(stock.str2epoch(input.ts))

    # BUG FIX: was `input.ts is "now"` — identity comparison on strings is
    # unreliable (depends on interning); use equality.
    if input.ts == "now":
        # Snap back to the previous whole window so frames cover a full span.
        tstart = tstart - (tstart % (input.twin)) - input.twin

    tend = tstart + input.twin

    ########################
    # Open waveform database: default to the day archive for tstart
    ########################
    if not input.dbname:
        date = stock.epoch2str(tstart, '%Y_%m_%d')
        db = ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")
    else:
        db = ds.dbopen(input.dbname, "r")

    ########################
    # Setup format for creating timestamp on GMT plots
    ########################
    f = '%D %H:%M:%S'
    timestamp = stock.epoch2str(tstart, f)
    pstextstring = "-153 53 10 0 1 0 %s" % timestamp

    current_time = tstart
    while current_time < tend:

        ########################
        # Get waveform data, compute PGA, and make GMT plot
        ########################
        run_gmt(sta, chan, db, current_time, filter, tstep, tpad, input.verbose)

        ########################
        # Refresh the on-frame timestamp every 10 seconds; every frame gets
        # the current pstextstring stamped on it.  (The original if/else ran
        # the identical pstext command in both branches — deduplicated.)
        ########################
        if (current_time % 10 == 0):
            timestamp = stock.epoch2str(current_time, f)
            pstextstring = "-153 53 10 0 1 0 %s" % timestamp
            print("Processing time %s" % timestamp)
        os.system("echo %s | pstext -R -Jb -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring,current_time))

        ########################
        # Plot scalebar
        ########################
        os.system("psscale -D4.1i/0.9i/1.0i/.20i -Cwhitered1.cpt -B:\"PGA (ug)\": -L0.0 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % current_time)
        current_time += tstep

    ########################
    # Make animated .GIF, remove temporary GMT plots, publish the result
    ########################
    print("Making animation")
    os.system("convert -delay 10 -loop 2 ~/Development/scratch/realtime_gmv/regions*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    os.system("rm -f ~/Development/scratch/realtime_gmv/regions_*.ps")
    os.system("mv ~/Development/scratch/realtime_gmv/animation.gif /usr/local/mosaic/Input/mgardine/animation_rt.gif")
    db.close()
Example #36
0
                volumes,net,time,endtime = db.getv("volumes","net","time","endtime")
            except Exception,e:
                raise DbcentralException("Problems with db.getv('volumes','net','time','endtime'). (%s)\n" % e)

            self.logger.debug( "volumes=%s" % volumes )
            self.logger.debug( "net=%s" % net )
            self.logger.debug( "time=%s" % time )
            self.logger.debug( "endtime=%s" % endtime )

            if endtime == null_endtime:
                # This will be problematic with realtime systems
                endtime = stock.now()

            self.logger.debug( "endtime=%s" % endtime )

            start_year  = int(stock.epoch2str(time,"%Y"))
            end_year    = int(stock.epoch2str(endtime,"%Y"))
            start_month = int(stock.epoch2str(time,"%L"))
            end_month   = int(stock.epoch2str(endtime,"%L"))

            if volumes == 'single':

                self._test_db(voltime,volendtime,dbname)

            elif volumes == 'year':

                for y in range(start_year,end_year+1):

                    voltime    = stock.str2epoch("1/1/%s 00:00:00" % y)
                    volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
                    dbname     = stock.epoch2str(voltime,dbname_template)
Example #37
0
def generate_report(params):
    """
    Initiate QC tests and report generation/distribution.

    Behaviour:
    The main function intended to be called by importer. Initiates QC
    tests and report generation/distribution.

    Arguments:
    params - All parameters <dict>
    params['dbin'] - Input database <str>
    params['pf'] - Parameter file <str>
    params['network'] - Network <str>
    params['tstart'] - Epoch start time <float>
    params['tend'] - Epoch end time <float>

    """
    import sys
    import os
    # Make the Antelope python bindings importable, then restore sys.path
    # so the modification does not leak into the rest of the process.
    sys.path.append('%s/data/python' % os.environ['ANTELOPE'])
    from antelope.datascope import dbopen
    from antelope.stock import epoch2str
    sys.path.remove('%s/data/python' % os.environ['ANTELOPE'])
    # _parse_pf() merges parameter-file values (e.g. 'netcode' and the
    # email settings popped below) into params.
    params = _parse_pf(params)
    qc_network_report = _QC_Network_Report({'network': params['network'], \
        'tstart': params['tstart'], 'tend': params['tend'], \
        'send_email': params.pop('send_email'), 'email': params.pop('email'), \
        'smtp_server': params.pop('smtp_server')})
    # Restrict the wfmeas table to rows for exactly this report window,
    # then walk the measurements grouped by station / channel / meastype.
    db = dbopen(params['dbin'])
    db = db.lookup(table='wfmeas')
    db = db.subset("time == _%f_ && endtime == _%f_" \
            % (params['tstart'],params['tend']))
    db = db.sort('sta')
    db = db.group('sta')
    for db.record in range(db.nrecs()):
        sta = db.getv('sta')[0]
        db_sta = db.subset("sta =~ /%s/" % sta)
        db_sta = db_sta.ungroup()
        db_sta = db_sta.sort('chan')
        db_sta = db_sta.group('chan')
        for db_sta.record in range(db_sta.nrecs()):
            chan = db_sta.getv('chan')[0]
            db_chan = db_sta.subset("chan =~ /%s/" % chan)
            db_chan = db_chan.ungroup()
            db_chan = db_chan.sort('meastype')
            db_chan = db_chan.group('meastype')
            for db_chan.record in range(db_chan.nrecs()):
                db_meas = db_chan.subset("meastype =~ /%s/" \
                        % db_chan.getv('meastype')[0])
                db_meas = db_meas.ungroup()
                db_meas = db_meas.sort('tmeas')
                count = db_meas.nrecs()
                db_meas.record = 0
                meastype = db_meas.getv('meastype')[0]
                if count == 1:
                    # Single failure: one-line summary with a waveform link.
                    ts, twin = db_meas.getv('tmeas', 'twin')
                    te = ts + twin
                    message = "%s test failed once between %s - %s\t%s\n" \
                            % (meastype, epoch2str(ts, "%Y%j %H:%M:%S"), \
                            epoch2str(te, "%Y%j %H:%M:%S"), _build_wf_link(sta,\
                            chan, params['netcode'], ts, te))
                else:
                    # Multiple failures: header line plus one line per window.
                    message = "%s test failed %d times between:" \
                            % (meastype, count)
                    for db_meas.record in range(count):
                        ts, twin = db_meas.getv('tmeas', 'twin')
                        te = ts + twin
                        message = "%s\n\t\t\t%s - %s\t%s" \
                                % (message, epoch2str(ts, "%Y%j %H:%M:%S"), \
                                epoch2str(te, "%Y%j %H:%M:%S"), \
                                _build_wf_link(sta, chan, params['netcode'], \
                                ts, te))
                    message = "%s\n" % message
                # Build a fresh dict per issue. The previous code mutated one
                # shared dict per station, so if _QC_issue keeps a reference
                # every issue for that station would alias the last message.
                qc_network_report.add_issue(_QC_issue({'sta': sta, \
                        'chan': chan, 'message': message}))
    qc_network_report.report()
Example #38
0
    def get_stations(self, regex, time, reference=False, event_data=None):
        """Collect site information for every station matching *regex*.

        Queries the site table for stations active on the day containing
        *time*, caches each one in ``self.stations`` keyed by station code,
        and (optionally) attaches station-to-station geometry relative to
        *reference* and event-to-station geometry from *event_data*.

        Returns the updated ``self.stations`` dict.
        """
        day_code = stock.epoch2str(time, "%Y%j")

        query = [
            "dbsubset ondate <= %s && (offdate >= %s || offdate == NULL)" %
            (day_code, day_code)
        ]
        query.append("dbsort sta")
        query.append("dbsubset %s" % regex)

        self.logger.info("Database query for stations:")
        self.logger.info(", ".join(query))

        with datascope.freeing(self.table.process(query)) as view:
            self.logger.info("Extracting sites for origin from db")

            seen = []
            for record in view.iter_record():
                (sta, lat, lon, chan) = record.getv("sta", "lat", "lon",
                                                    "chan")

                # Reduce the channel name to its two-letter code, adding a
                # wildcard suffix for extended (location-coded) names.
                chan_code = chan[:2] + "._." if len(chan) > 3 else chan[:2]

                key = sta + chan_code
                if key in seen:
                    continue
                seen.append(key)

                try:
                    self.stations[sta].append_chan(chan_code)
                except Exception:
                    # First sighting of this station: create its record and
                    # compute the derived geometry once.
                    self.stations[sta] = Records(sta, lat, lon)
                    self.stations[sta].append_chan(chan_code)

                    if reference and sta != reference:
                        ref_az = "%0.2f" % record.ex_eval(
                            "azimuth(%s,%s,%s,%s)" % (
                                self.stations[reference].lat,
                                self.stations[reference].lon,
                                lat,
                                lon,
                            ))
                        ref_delta = "%0.4f" % record.ex_eval(
                            "distance(%s,%s,%s,%s)" % (
                                self.stations[reference].lat,
                                self.stations[reference].lon,
                                lat,
                                lon,
                            ))
                        ref_km = round(
                            record.ex_eval("deg2km(%s)" % ref_delta), 2)

                        self.stations[sta].set_ss(ref_az, ref_delta, ref_km)

                    if event_data:
                        ev_seaz = "%0.2f" % record.ex_eval(
                            "azimuth(%s,%s,%s,%s)" %
                            (lat, lon, event_data.lat, event_data.lon))
                        ev_esaz = "%0.2f" % record.ex_eval(
                            "azimuth(%s,%s,%s,%s)" %
                            (event_data.lat, event_data.lon, lat, lon))
                        ev_delta = "%0.4f" % record.ex_eval(
                            "distance(%s,%s,%s,%s)" %
                            (event_data.lat, event_data.lon, lat, lon))
                        ev_km = record.ex_eval("deg2km(%s)" % ev_delta)

                        # P-wave travel time, shifted one second early and
                        # clamped at zero.
                        pdelay = int(
                            record.ex_eval("pphasetime(%s,%s)" %
                                           (ev_delta, event_data.depth)))
                        pdelay = pdelay - 1 if pdelay > 0 else 0

                        ptime = time + pdelay

                        self.stations[sta].set_es(ev_seaz, ev_esaz, ev_delta,
                                                  ev_km, pdelay, ptime)

        return self.stations
Example #39
0
def generate_report(params):
    """
    Initiate QC tests and report generation/distribution.

    Behaviour:
    The main function intended to be called by importer. Initiates QC
    tests and report generation/distribution.

    Arguments:
    params - All parameters <dict>
    params['dbin'] - Input database <str>
    params['pf'] - Parameter file <str>
    params['network'] - Network <str>
    params['tstart'] - Epoch start time <float>
    params['tend'] - Epoch end time <float>

    """
    import sys
    import os

    # Make the Antelope python bindings importable, then restore sys.path
    # so the modification does not leak into the rest of the process.
    sys.path.append("%s/data/python" % os.environ["ANTELOPE"])
    from antelope.datascope import dbopen
    from antelope.stock import epoch2str

    sys.path.remove("%s/data/python" % os.environ["ANTELOPE"])
    # _parse_pf() merges parameter-file values (e.g. "netcode" and the
    # email settings popped below) into params.
    params = _parse_pf(params)
    qc_network_report = _QC_Network_Report(
        {
            "network": params["network"],
            "tstart": params["tstart"],
            "tend": params["tend"],
            "send_email": params.pop("send_email"),
            "email": params.pop("email"),
            "smtp_server": params.pop("smtp_server"),
        }
    )
    # Restrict the wfmeas table to rows for exactly this report window,
    # then walk the measurements grouped by station / channel / meastype.
    db = dbopen(params["dbin"])
    db = db.lookup(table="wfmeas")
    db = db.subset(
        "time == _%f_ && endtime == _%f_" % (params["tstart"], params["tend"])
    )
    db = db.sort("sta")
    db = db.group("sta")
    for db.record in range(db.nrecs()):
        sta = db.getv("sta")[0]
        db_sta = db.subset("sta =~ /%s/" % sta)
        db_sta = db_sta.ungroup()
        db_sta = db_sta.sort("chan")
        db_sta = db_sta.group("chan")
        for db_sta.record in range(db_sta.nrecs()):
            chan = db_sta.getv("chan")[0]
            db_chan = db_sta.subset("chan =~ /%s/" % chan)
            db_chan = db_chan.ungroup()
            db_chan = db_chan.sort("meastype")
            db_chan = db_chan.group("meastype")
            for db_chan.record in range(db_chan.nrecs()):
                db_meas = db_chan.subset(
                    "meastype =~ /%s/" % db_chan.getv("meastype")[0]
                )
                db_meas = db_meas.ungroup()
                db_meas = db_meas.sort("tmeas")
                count = db_meas.nrecs()
                db_meas.record = 0
                meastype = db_meas.getv("meastype")[0]
                if count == 1:
                    # Single failure: one-line summary with a waveform link.
                    ts, twin = db_meas.getv("tmeas", "twin")
                    te = ts + twin
                    message = "%s test failed once between %s - %s\t%s\n" % (
                        meastype,
                        epoch2str(ts, "%Y%j %H:%M:%S"),
                        epoch2str(te, "%Y%j %H:%M:%S"),
                        _build_wf_link(sta, chan, params["netcode"], ts, te),
                    )
                else:
                    # Multiple failures: header line plus one line per window.
                    message = "%s test failed %d times between:" % (meastype, count)
                    for db_meas.record in range(count):
                        ts, twin = db_meas.getv("tmeas", "twin")
                        te = ts + twin
                        message = "%s\n\t\t\t%s - %s\t%s" % (
                            message,
                            epoch2str(ts, "%Y%j %H:%M:%S"),
                            epoch2str(te, "%Y%j %H:%M:%S"),
                            _build_wf_link(sta, chan, params["netcode"], ts, te),
                        )
                    message = "%s\n" % message
                # Build a fresh dict per issue. The previous code mutated one
                # shared dict per station, so if _QC_issue keeps a reference
                # every issue for that station would alias the last message.
                qc_network_report.add_issue(
                    _QC_issue({"sta": sta, "chan": chan, "message": message})
                )
    qc_network_report.report()
Example #40
0
    def new(self,
            rawpkt,
            name_type="pf/xi",
            select=False,
            reject=False,
            silent=False):
        """Generate a new packet from a raw packet document.

        Validates rawpkt, applies the select/reject srcType filters,
        resolves the datalogger name from the q330 serial (via the IMEI
        buffer when needed), maps "valueMap" channels into self.payload,
        and assembles self.pkt. Sets self.valid = True on success; returns
        early (without setting self.valid) on any rejection or missing data.

        rawpkt    - raw packet document <dict>
        name_type - suffix appended to dlname for the packet source name <str>
        select    - regex; if set, only matching srcTypes are processed
        reject    - regex; if set, matching srcTypes are dropped
        silent    - suppress warning log messages <bool>
        """

        self.logging.debug("new packet")

        # A packet without a document id is unusable.
        if not rawpkt["_id"]:
            if not silent:
                self.logging.warning("Bad Packet: %s" % rawpkt)
            return

        # Reset per-packet state before populating from rawpkt.
        self._clean()

        self.name_type = name_type

        # Disable _convert_unicode - it seems to have the opposite effect
        # under python 3 or the newer pymongo library.
        # self.rawpkt = self._convert_unicode(rawpkt)
        self.rawpkt = rawpkt

        # Filter by source type: reject wins, then select must match.
        if reject and re.search(reject, self.rawpkt["srcType"]):
            self.logging.debug("type [%s] rejected by configuration" %
                               self.rawpkt["srcType"])
            return

        if select and not re.search(select, self.rawpkt["srcType"]):
            self.logging.debug("type [%s] missed selection by configuration" %
                               self.rawpkt["srcType"])
            return

        self.logging.debug(self.rawpkt)

        # Track IDs
        self.logSeqNo = self.rawpkt["messageLogSeqNo"]
        self.seqNo = self.rawpkt["seqNo"]
        self.id = "%s.%s" % (self.logSeqNo, self.seqNo)

        # Date object
        self.datetime = self.rawpkt["timestamp"]
        # Epoch string (seconds since 1970-01-01; timestamp is treated as
        # naive UTC)
        self.time = (self.datetime -
                     datetime.datetime(1970, 1, 1)).total_seconds()
        # Time string
        self.strtime = stock.epoch2str(self.time, "%D %H:%M:%S %Z",
                                       tz="UTC").strip()

        self.q330 = self.rawpkt["q330Sn"]
        self.imei = self.rawpkt["deviceIMEI"]
        self.src = self.rawpkt["srcType"]
        self.srcname = self.src.lower()

        # If the reported serial/IMEI pairing is invalid, fall back to the
        # serial previously cached for this IMEI; skip the packet if none.
        if not self.imei_buffer.add(imei=self.imei, serial=self.q330):
            self.logging.warning("Invalid Q330 serial [%s] for IMEI [%s]" %
                                 (self.q330, self.imei))
            self.q330 = self.imei_buffer(self.imei)

            if not self.q330:
                if not silent:
                    self.logging.warning(
                        "UNKNOWN IMEI [%s]: SKIP DATA PACKET!!!" % self.imei)
                return
            else:
                if not silent:
                    self.logging.warning(
                        "USING CACHED Q330 SERIAL [%s] FOR IMEI [%s]" %
                        (self.q330, self.imei))

        # Resolve the datalogger name: last matching resolver wins.
        for test in self.q330_serial_dlname:
            if test(self.q330):
                self.dlname = test(self.q330)
                self.logging.debug("%s => %s" % (self.q330, self.dlname))

        if not self.dlname:
            if not silent:
                self.logging.warning("NO DLNAME FOR Q330 SERIAL: %s " %
                                     self.q330)
            return

        self.logging.debug(self.src)
        # NOTE(review): self.valueMap is presumably reset by _clean() above;
        # otherwise this debug line could log a stale value - confirm.
        self.logging.debug(self.valueMap)

        # Verify if we have data pairs
        if "valueMap" in self.rawpkt:
            self.valueMap = self.rawpkt["valueMap"]

            # Extract each value to a new key:value on the dict
            for chan in self.valueMap:
                if chan in self.channel_mapping:
                    # A mapping explicitly set to a falsy value means "drop
                    # this channel".
                    if not self.channel_mapping[chan]:
                        continue

                    self.payload[
                        self.channel_mapping[chan]] = self.valueMap[chan]

                    self.logging.debug("%s -> %s:%s" %
                                       (chan, self.channel_mapping[chan],
                                        self.valueMap[chan]))
                else:
                    self.logging.warning("[%s] NOT DEFINED IN PF FILE" % chan)

            # Normalize the xi loop quality counters by the sample count.
            for test in ["xil1q", "xil2q"]:
                if test in self.payload and "xisamp" in self.payload:
                    try:
                        self.payload[test] = (float(self.payload[test]) /
                                              self.payload["xisamp"])
                    except Exception:
                        # Best-effort: leave the raw value on bad/zero xisamp.
                        pass

            # NOTE(review): self.pcktbuf is only assigned when "valueMap" is
            # present; presumably _clean() resets it so the update below never
            # sees a previous packet's buffer - confirm.
            self.pcktbuf = {
                "dls": {
                    self.dlname: self.payload
                },
                "q330": self.q330,
                "imei": self.imei,
                "src": self.src,
                "srcname": self.srcname,
            }

        self.logging.debug(self.payload)

        # Try to build packet from info
        if self.name_type and self.time and self.payload:

            self.pkt.srcname = Pkt.SrcName("%s/%s" %
                                           (self.dlname, self.name_type))
            self.pkt.time = self.time

            # self.logging.debug( self.pkt.type )
            # self.logging.debug( self.pkt.srcname )

            # Extract pf structure, update it and return it.
            temp = self.pkt.pf
            temp.update(self.pcktbuf)
            self.pkt.pf = temp

            self.logging.debug(self.pkt.type)
            self.logging.debug(self.pkt.srcname)

            self.logging.debug("Pkt( %s, %s) => {%s}" % (
                self.pkt.srcname,
                self.pkt.time,
                self.pkt.pf.pf2string().replace("\n", ", ").replace("\t", ":"),
            ))

            self.valid = True

        else:
            self.logging.warning(
                "NO VALUABLE INFORMATION IN PACKET. dlname:%s  time:%s" %
                (self.dlname, self.time))
            return

        self.logging.info(str(self))
Example #41
0
            self.logger.debug("time=%s" % time)
            self.logger.debug("endtime=%s" % endtime)

            if endtime == null_endtime:
                # This will be problematic with realtime systems
                endtime = stock.now()

            self.logger.debug("endtime=%s" % endtime)

            if self.volumes == 'single':

                self._test_db(voltime, volendtime, dbname)

            elif self.volumes == 'year':

                start_year = int(stock.epoch2str(time, "%Y"))
                end_year = int(stock.epoch2str(endtime, "%Y"))

                for y in range(start_year, end_year + 1):

                    voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
                    volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
                    dbname = stock.epoch2str(voltime, dbname_template)

                    self._test_db(voltime, volendtime, dbname)

            elif self.volumes == 'month':

                start_month = int(stock.epoch2str(time, "%L"))
                start_year = int(stock.epoch2str(time, "%Y"))
                end_month = int(stock.epoch2str(endtime, "%L"))