def doneWithFile(filename):
    """Move filename to DONESD."""
    me = 'doneWithFile(%s)' % repr(filename)
    _sl.info(me)
    moved = False  # Pessimistic until the move is verified.
    try:
        # Moving disabled entirely when no done-subdirectory is configured.
        if not DONESD:
            return
        # Build source and sink paths under the watch path.
        src = os.path.normpath(WPATH + '/' + filename)
        snk = os.path.normpath(WPATH + '/' + DONESD + '/' + filename)
        # Guard: source must exist and sink must not.
        if not os.path.isfile(src):
            _m.beeps(1)
            _sl.warning('src dne')
            return
        if os.path.isfile(snk):
            _m.beeps(1)
            _sl.warning('snk already in %s' % DONESD)
            return
        # Attempt the move; a failure is squawked and tolerated,
        # then caught by the post-move verification below.
        try:
            shutil.move(src, snk)
            moved = True
        except Exception as E:
            moved = False
            _m.beeps(3)
            _sl.warning('moving %s to %s failed: %s' % (filename, DONESD, E))
        # Verify: source gone, sink present.
        if os.path.isfile(src):
            moved = False
            _m.beeps(1)
            _sl.warning('src not moved')
            return
        if not os.path.isfile(snk):
            moved = False
            _m.beeps(1)
            _sl.warning('snk dne in %s' % DONESD)
            return
    except Exception as E:
        DOSQUAWK('%s: E: %s @ %s' % (me, E, _m.tblineno()))
        raise
    finally:
        # Only a verified move removes the watch-DB entry.
        if moved:
            FFWDB.delete(filename)
def DOSQUAWK(errmsg, beeps=3):
    """For use in exception blocks."""
    global SQUAWKED
    # Squawk only once per process; later calls are silent.
    if SQUAWKED:
        return
    _m.beeps(beeps)
    for em in errmsg.split('\n'):
        _sl.error(em)
    SQUAWKED = True
def xlog2db():
    """Set up globals from ARGS, open the sink DB, and run watcherThread
    until it stops (or Ctrl-c)."""
    global SRCID, SUBID, WPATH, DONESD, INTERVAL
    global FFWDBPFN, FWTSTOP, FWTSTOPPED, XLOGDB
    me, action = 'xlog2db', ''
    # Fix: predefine so the finally block cannot raise NameError (masking
    # the original exception) if anything fails before the thread is made.
    watcher_thread = None
    try:
        _sl.info(me + ' begins')#$#
        SRCID = _a.ARGS['--srcid']
        SUBID = _a.ARGS['--subid']
        # Fix: was .rstrip('/').rstrip('/') -- the duplicate call is a no-op.
        # (If trailing backslashes were also intended, use .rstrip('/\\').)
        WPATH = _a.ARGS['--wpath'].rstrip('/')
        DONESD = _a.ARGS['--donesd']
        INTERVAL = float(_a.ARGS['--interval'])
        DBCFG = _a.ARGS['--xlogdb']  # DB connection configuration, as a string.
        # SECURITY NOTE: eval of a command-line string; tolerable only
        # because the operator controls argv. ast.literal_eval would be safer.
        DBCFG = eval(DBCFG)  # ..., as a dict.
        _sl.info()
        _sl.info(' srcid: ' + SRCID)
        _sl.info(' subid: ' + SUBID)
        _sl.info(' wpath: ' + WPATH)
        _sl.info(' done sd: ' + DONESD)
        _sl.info(' interval: ' + str(INTERVAL))
        _sl.info(' db cfg: ' + repr(DBCFG))
        _sl.info()
        # FFW DB PFN. DB creation must be done in watcherThread.
        FFWDBPFN = os.path.normpath(WPATH + '/xlog2db.s3')
        # Open sink db.
        XLOGDB = mc.connect(host=DBCFG['host'], user=DBCFG['user'],
                            password=DBCFG['password'], db=DBCFG['database'],
                            raise_on_warnings=True)
        # Start watcher() in a thread.
        watcher_thread = threading.Thread(target=watcherThread)
        watcher_thread.start()
        # Wait for startup.
        while not FWTRUNNING:
            time.sleep(0.010)
        # Wait for shutdown.
        while FWTRUNNING:
            time.sleep(1)  # Ctrl-c to stop & exit.
    except KeyboardInterrupt as E:
        _m.beeps(1)
        msg = '2: {}: KeyboardInterrupt: {}'.format(me, E)
        _sl.warning(msg)
        ###---DOSQUAWK(errmsg, beeps=1)
        pass###raise
    except Exception as E:
        errmsg = '{}: E: {} @ {}'.format(ME, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Stop the watcher thread if it was started and is still running.
        if watcher_thread and FWTRUNNING:
            FWTSTOP = True
            watcher_thread.join(3 * INTERVAL)
            _sl.info('thread STOPPED: %s' % FWTSTOPPED)
def __init__(
    self,
    xloghost=None,
    xlogport=None,
    xsrcid=None,
    xsubid=None,
    xlogel="2",
    xlogsl="_",
    sl=None,
    sw=None,
    txrate=0.02,
    nop=False,
):
    """Initialize the XLog wrapper.

    Connects to the xlog server and starts the sender thread only when
    both xloghost and xlogport are supplied (and nop is False).
    Raises Exception (chained to its cause) if setup fails.
    """
    me = "XLog.__init__"
    self.nop = nop  # Make a do-nothing logger.
    self.xlog = None  # Default no logging until host & port connected.
    self.xloghost = xloghost  # xlog server host IP.
    self.xlogport = xlogport  # xlog server port.
    # srcid, subid, el & sl are defaults used by message builders if no
    # overrides are supplied in the log dict.
    self.xsrcid = xsrcid  # Initial source ID.
    self.xsubid = xsubid  # Initial sub ID.
    self.xlogel = xlogel  # Default error-level (info).
    self.xlogsl = xlogsl  # Default sub-level (base).
    # Optional simple logger and screenwriter objects.
    self.sl = sl  # Optional simple logger to mimic what's being sent to xlog.
    self.sw = sw  # Optional screen writer object for error messages (otherwise print).
    # Message queue.
    self.q = None  # Queue of messages to send.
    # Processing thread.
    self.t = None  # Thread to send messages.
    # Stop signal.
    self.stop = False  # External signal to stop XLog instance.
    self.stopped = False  # When stopped by self.stop.
    # Count messages.
    self.nmsgs = 0  # Number of messages sent.
    #
    if self.xloghost and self.xlogport:
        try:
            if not self.nop:
                self.xlog = XLogTxRx((self.xloghost, self.xlogport), txrate=txrate)
                self.q = queue.Queue(10000)  # 10^4
                self.t = threading.Thread(target=self._xlogthread)  # ??? daemon?
                self.t.start()
            if FAIL_CONNECT:
                1 / 0  # Test hook: force the failure path.
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {} @ {}".format(me, "setup failed", E, _m.tblineno())
            self._p("** " + errmsg)
            # Fix: chain the original exception so the traceback keeps
            # the real cause instead of silently discarding it.
            raise Exception(errmsg) from E
def close(self):
    """Shut down xlog wrap."""
    # A do-nothing logger has nothing to tear down.
    if self.nop:
        self.stopped = True
        return
    # Signal the sender thread and give it a second to drain.
    self.stop = True
    self.t.join(timeout=1)
    if self.t.is_alive():
        _m.beeps(3)
        msg = "XLog._xlogthread did not self-stop"
        self._p("** " + msg)
    self.xlog.close()
    1 / 1
def watcherThread():
    """A thread to watch WPATH for files to process."""
    global LOADRECS, FFWDB, FWTRUNNING, FWTSTOP, FWTSTOPPED
    LOADRECS = []
    me = 'watcher thread'
    try:
        FWTRUNNING = True
        assert XLOGDB, 'no XLOGDB'
        # Connect to FlatFileWatchDataBase.
        FFWDB = ffwdb.FFWDB(FFWDBPFN)
        assert FFWDB, 'no FFWDB'
        uu = 0  # Unix Utc of previous pass (0 -> no wait on first pass).
        while not FWTSTOP:
            # flushHeartbeats()
            # Wait out INTERVAL.
            z = time.time()
            w = INTERVAL - (z - uu)
            if w > 0:
                _sw.wait(w)
            uu = _dt.utcut()
            ul = _dt.locut(uu)
            uuts = '%15.4f' % uu  # 15.4, unblanked fraction.
            uuiosfs = _dt.ut2isofs(uu)
            uliosfs = _dt.ut2isofs(ul)
            # Heartbeat?
            if OWNHEARTBEAT:
                logrec = ownHeartbeat(uu)
                addHeartbeat(logrec)
            # Files?
            t0 = time.perf_counter()
            fis = getFIs(uu)
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning(' getFIs: {:9,.1f} ms'.format((1000*(t1-t0))))
            if not fis:
                errmsg = 'no logfiles @ ' + uliosfs
                raise Exception(errmsg)
            # Update FFWDB.
            # Freshen the "acquired" timestamp.
            # Delete entries for nonexistent files.
            t0 = time.perf_counter()
            filenames = [fi['filename'] for fi in fis]
            filenames.sort()
            # Compare real (nf) and db (nx) counts.
            # (nx also exercises the DB; keep the call.)
            nf = len(filenames)
            nx = FFWDB.count()
            # Fix: removed a no-op if/elif/else ladder here that assigned
            # (nf, nx) to themselves in every branch -- leftover debug
            # scaffolding with no effect on behavior.
            FFWDB.acquired(filenames, uu)
            for fi in fis:
                z = updateDB(fi)
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('updateDBs: {:9,.1f} ms'.format((1000*(t1-t0))))
            # Find the oldest, newest unfinished files in DB.
            t0 = time.perf_counter()
            o_dbfi, n_dbfi = FFWDB.oldestnewest('u')
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning(' oldest: {:9,.1f} ms'.format((1000*(t1-t0))))
            # Something unfinished?
            if o_dbfi:
                fn = o_dbfi['filename']
                # Historical iff it is not the newest (i.e. "live") file.
                historical = (fn != n_dbfi['filename'])
                # Export the file.
                exportFile(historical, o_dbfi)  # !CHANGE!
            # Move oldest finished file?
            # It must not be the only, and therefore "live", file.
            if DONESD:
                o_dbfi, n_dbfi = FFWDB.oldestnewest('f')
                if o_dbfi and (n_dbfi['filename'] != o_dbfi['filename']):
                    t0 = time.perf_counter()
                    doneWithFile(o_dbfi['filename'])
                    t1 = time.perf_counter()
                    if TIMINGS:
                        _sl.warning(' moved: {:9,.1f} ms'.format((1000*(t1-t0))))
            if ONECHECK:
                FWTSTOP = True
    except KeyboardInterrupt as E:
        _m.beeps(1)
        # watcherThread:
        msg = '1: {}: KeyboardInterrupt: {}'.format(me, E)
        _sl.warning(msg)
        ###---DOSQUAWK(errmsg, beeps=1)
        pass###raise  # Let the thread exit. Avoids "Exception in thread...".
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
        if FWTSTOP:
            FWTSTOPPED = True
        FFWDB.disconnect()
        _sl.info('%s exits. STOPPED: %s' % (me, str(FWTSTOPPED)))
        FWTRUNNING = False
def exportFile(historical, xfi):
    """Export a file (from info dict)."""
    fn = xfi['filename']
    me = 'exportFile(%s, %s)' % (str(historical), fn)
    _sl.info('%s %s %s' % (_dt.ut2iso(_dt.locut()), fn, 'h' if historical else ''))#$#
    nb2e = 0  # Finally references.
    f = None  # Fix: predefine so finally cannot NameError before open.
    try:
        # Safety flush.
        loadrecs2db()
        # How many bytes of file is to be exported?
        fskip = xfi['processed']
        fsize = xfi['size']
        nb2e = fsize - fskip
        if nb2e <= 0:
            return
        # Still exists?
        pfn = os.path.normpath(WPATH + '/' + fn)
        if not os.path.isfile(pfn):
            _m.beeps(1)
            errmsg = '%s: file dne' % me
            _sl.warning(errmsg)
            return
        # .gz files are always treated as historical, and
        # the whole file is read. (No seek!)
        if pfn.endswith('.gz'):
            # Fix: text mode for gzip is 'rt'; the original's 'r' is
            # *binary* for gzip.open and raises ValueError when the
            # encoding/errors kwargs are supplied.
            with gzip.open(pfn, 'rt', encoding=ENCODING, errors=ERRORS) as f:
                for x, logrec in enumerate(f):
                    if not (x % 1000):
                        _sw.iw('.')
                    logrec2loadrecs(logrec)
            return
        # Uncompressed files are read as text, with an initial
        # seek from the SOF. The file is read to its end, even
        # if this goes beyond the size given, which will
        # happen if XLOG appends to this file while we're
        # processing it. This will result in the appendage
        # being reprocessed next time around, but this is
        # harmless bcs logrec and heartbeat processing is
        # able to handle (skip) duplicates.
        with open(pfn, 'r', encoding=ENCODING, errors=ERRORS) as f:
            if fskip > 0:
                _sl.info('skipping {:,d} bytes'.format(fskip))
                f.seek(fskip)
            for x, logrec in enumerate(f):
                if not (x % 1000):
                    _sw.iw('.')
                logrec2loadrecs(logrec)
    except Exception as E:
        nb2e *= -1  # Prevent 'processed' update.
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # End dots.
        _sw.nl()
        # Close src file. Fix: original did `f.close` without calling it
        # (a no-op attribute access). The with-blocks already close the
        # file; this is a best-effort belt-and-braces close.
        try:
            if f is not None:
                f.close()
        except Exception:
            pass
        # Update 'processed'?
        if nb2e > 0 and not TESTONLY:
            xfi['processed'] = fsize
            z = {'filename': xfi['filename'], 'processed': xfi['processed']}
            FFWDB.update(z)
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
# Test 2. if False: WPATH = 'c:/xlog/test/' DONESD = 'XL2DB' FFWDBPFN = os.path.normpath(WPATH + '/xlog2db.s3') FFWDB = ffwdb.FFWDB(FFWDBPFN) fn = '151207-19.log' doneWithFile(fn) doneWithFile(fn) 1/1 # Production. if True: try: xlog2db() except KeyboardInterrupt as E: _m.beeps(1) msg = '3: {}: KeyboardInterrupt: {}'.format(ME, E) _sl.warning(msg) ###---DOSQUAWK(errmsg, beeps=1) pass###raise except Exception as E: errmsg = '{}: E: {}'.format(ME, E) DOSQUAWK(errmsg) raise finally: shutDown() 1/1
def _xlogthread(self):
    """A thread to send a FIFO queue of objects to an xlog server.

    Queue entries are tuples (otn, ...): 0 = field tuple, 1 = dict,
    2 = pre-encoded json. Loops until self.stop, then sets self.stopped.
    """
    me = "_xlogthread"
    while not self.stop:
        try:
            o = self.q.get(timeout=1)  # !MAGIC! 1 sec timeout.
            try:
                if FAIL_O:
                    1 / 0  # Test hook: force the failure path.
                otn = o[0]  # object type number
                if otn == 0:
                    # srcid, subid, el, sl, msg -> {_id, _si, _el, _sl, _msg}
                    d = {"_id": str(o[1]), "_si": str(o[2]), "_el": str(o[3]), "_sl": str(o[4]), "_msg": str(o[5])}
                    j = json.dumps(d, ensure_ascii=ENSURE_ASCII, sort_keys=SORT_KEYS)
                    if DEBPRT:
                        self._p("~~ 0: [%d] -> [%d]" % (len(d), len(j)))
                    self.xlog.send(j)
                    self.nmsgs += 1
                    continue
                elif otn == 1:
                    # dict
                    d = o[1]
                    j = json.dumps(d, ensure_ascii=ENSURE_ASCII, sort_keys=SORT_KEYS)
                    if DEBPRT:
                        self._p("~~ 1: [%d] -> [%d]" % (len(d), len(j)))
                    self.xlog.send(j)
                    self.nmsgs += 1
                    continue
                elif otn == 2:
                    # json
                    j = o[1]
                    if DEBPRT:
                        self._p("~~ 2: [%d]" % (len(j)))
                    self.xlog.send(j)
                    self.nmsgs += 1
                    continue
                else:
                    _m.beeps(3)
                    # Switch to errmsg.
                    errmsg = "{}: {}: {}".format(me, "unexpected object type number", repr(otn))
                    self._p("** " + errmsg)
                    z = json.dumps(
                        {"_id": self.xsrcid, "_si": self.xsubid, "_el": 5, "_sl": "!", "_msg": "[" + errmsg + "]"},
                        ensure_ascii=ENSURE_ASCII,
                        sort_keys=SORT_KEYS,
                    )
                    self.xlog.send(z)
                    self.nmsgs += 1
                    continue
            except Exception as E:
                # Any per-object failure: squawk it to xlog and carry on.
                _m.beeps(3)
                errmsg = "{}: {} @ {}".format(me, E, _m.tblineno())
                self._p("** " + errmsg)
                self._p("** " + repr(o))
                # Switch to errmsg.
                z = json.dumps(
                    {"_id": self.xsrcid, "_si": self.xsubid, "_el": 5, "_sl": "!", "_msg": "[" + errmsg + "]"},
                    ensure_ascii=ENSURE_ASCII,
                    sort_keys=SORT_KEYS,
                )
                self.xlog.send(z)
                self.nmsgs += 1
                continue
                ###---self._xlogsend(z)
        except queue.Empty:
            pass  # Normal poll timeout; loop to re-check self.stop.
        except Exception:
            # Fix: was a bare `except:` which also swallowed SystemExit/
            # KeyboardInterrupt. Still deliberately broad so the sender
            # thread never dies to a reporting failure above.
            pass
    ##### not self.stop: ^^^
    self.stopped = True
def tests(tn=None):
    """Manual test driver: run test number tn (1..5) against a live xlog
    server at 192.168.100.1:12321."""
    import time
    global DEBPRT, FAIL_CONNECT, FAIL_O
    me, action = "tests", None

    def _p(s):
        # Print via the XLog's screenwriter when available, else stdout.
        nonlocal xlog
        if xlog and xlog.sw:
            xlog.sw.nlln(s.rstrip())
        else:
            print(s.rstrip())

    xlog = None
    _p("")
    _p("-- tests begins")
    DEBPRT = True
    # Test 1: constructor failure path (FAIL_CONNECT forces 1/0 in __init__).
    if True and tn == 1:
        action = "fail connect test"
        _p("")
        _p(action)
        FAIL_CONNECT = True
        try:
            xlog = XLog(
                xloghost="192.168.100.1",
                xlogport=12321,
                xsrcid="TEST",
                xsubid="test",
                xlogel=0,
                xlogsl="_",
                sw=None,
                txrate=0.02,
            )
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {}".format(me, action, E)
            _p("** " + errmsg)
            1 / 1
        FAIL_CONNECT = False
    # Test 2: each simple log-type function, 100x; all must be counted.
    if True and tn == 2:
        action = "simple log-type functions"
        _p("")
        _p("-- " + action)
        nm = 0
        try:
            xlog = XLog(
                xloghost="192.168.100.1",
                xlogport=12321,
                xsrcid="TEST",
                xsubid="test",
                xlogel=0,
                xlogsl="_",
                sw=None,
                txrate=0.01,
            )
            for x in range(100):
                xlog.null("f: null")
                nm += 1
                xlog.debug("f: debug")
                nm += 1
                xlog.info("f: info")
                nm += 1
                xlog.warning("f: warning")
                nm += 1
                xlog.error("f: error")
                nm += 1
                xlog.critical("f: critical")
                nm += 1
                xlog.extra("f: extra")
                nm += 1
            # Poll until the sender queue drains (max ~10s).
            for x in range(100):
                if not xlog.busy():
                    break
                time.sleep(0.1)
            time.sleep(0.1)  # Needs a little more.
            nmsgs = xlog.nmsgs
            assert nmsgs == nm, "expecting %d but got %d nmsgs" % (nm, nmsgs)
            xlog.close()
            assert xlog.stopped, "did not see xlog.stopped"
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {}".format(me, action, E)
            _p("** " + errmsg)
            1 / 1
    # Test 3: prepared dict (d2xlog) and prepared json (j2xlog).
    if True and tn == 3:
        action = "prepared dict and json functions"
        _p("")
        _p("-- " + action)
        nm = 0
        try:
            xlog = XLog(
                xloghost="192.168.100.1",
                xlogport=12321,
                xsrcid="TEST",
                xsubid="test",
                xlogel=0,
                xlogsl="_",
                sw=None,
                txrate=0.01,
            )
            d = {"_id": "TEST", "_si": "test", "_el": 6, "_sl": "|", "_msg": "Hello World!", "foo": 123, "rat": "fink"}
            xlog.d2xlog(d)
            nm += 1
            j = json.dumps(d, ensure_ascii=ENSURE_ASCII, sort_keys=SORT_KEYS)
            xlog.j2xlog(j)
            nm += 1
            for x in range(100):
                if not xlog.busy():
                    break
                time.sleep(0.1)
            time.sleep(0.1)  # Needs a little more.
            nmsgs = xlog.nmsgs
            assert nmsgs == nm, "expecting %d but got %d nmsgs" % (nm, nmsgs)
            xlog.close()
            assert xlog.stopped, "did not see xlog.stopped"
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {}".format(me, action, E)
            _p("** " + errmsg)
            1 / 1
    # Test 4: unexpected object type number (9) still counts as one send
    # (the thread squawks it to the server).
    if True and tn == 4:
        action = "unexpected object type"
        _p("")
        _p("-- " + action)
        nm = 0
        try:
            xlog = XLog(
                xloghost="192.168.100.1",
                xlogport=12321,
                xsrcid="TEST",
                xsubid="test",
                xlogel=0,
                xlogsl="_",
                sw=None,
                txrate=0.01,
            )
            xlog.q.put((9, 123456789), block=False)
            nm += 1
            for x in range(100):
                if not xlog.busy():
                    break
                time.sleep(0.1)
            time.sleep(1.0)  # Needs enough time for beeps(3).
            nmsgs = xlog.nmsgs
            assert nmsgs == nm, "expecting %d but got %d nmsgs" % (nm, nmsgs)
            xlog.close()
            assert xlog.stopped, "did not see xlog.stopped"
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {}".format(me, action, E)
            _p("** " + errmsg)
            1 / 1
    # Test 5: FAIL_O forces the per-object failure path; the error squawk
    # is itself sent, so the count still matches.
    if True and tn == 5:
        action = "fail processing object"
        _p("")
        _p("-- " + action)
        nm = 0
        try:
            xlog = XLog(
                xloghost="192.168.100.1",
                xlogport=12321,
                xsrcid="_src",
                xsubid="_sub",
                xlogel=0,
                xlogsl="_",
                sw=None,
                txrate=0.01,
            )
            FAIL_O = True
            xlog.msg2xlog("1/0")
            nm += 1
            for x in range(100):
                if not xlog.busy():
                    break
                time.sleep(0.1)
            time.sleep(1.0)  # Needs enough time for beeps(3).
            FAIL_O = False
            nmsgs = xlog.nmsgs
            assert nmsgs == nm, "expecting %d but got %d nmsgs" % (nm, nmsgs)
            xlog.close()
            assert xlog.stopped, "did not see xlog.stopped"
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {}".format(me, action, E)
            _p("** " + errmsg)
            1 / 1
    _p("")
    _p("-- tests ends")
    _p("")
    1 / 1
def handle_connection(skt, address):
    """Serve one client connection: read lines, ACK pings, queue log
    records to LFQ, honor !STOP!; always closes the socket."""
    global NCX, NOCX, XLOGSTOP
    try:
        NCX += 1
        NOCX += 1
        _sl.info('handle_connection: %d (%d open): %r' % (NCX, NOCX, address))
        with skt.makefile(encoding=ENCODING, errors=ERRORS, newline='\n') as cf:
            while True:
                rx = cf.readline()
                tx = None
                #$#ml.debug('rx: %r' % rx.rstrip())#$#
                if rx:
                    rx = rx.rstrip()
                    if not rx:
                        continue  # ??? Or, should this be ACK'd with an OK?
                    # Fix: '!STOP!' must be tested BEFORE the generic
                    # '!...!' echo -- it begins and ends with '!', so the
                    # original elif was unreachable and XLOGSTOP was
                    # never set.
                    if rx == '!STOP!':
                        XLOGSTOP = True
                        tx = b'OK'
                    elif rx[0] == '!' and rx[-1] == '!':
                        tx = b'OK|' + rx.encode(encoding=ENCODING, errors=ERRORS)
                    else:
                        # Should be a log record.
                        # Add the source IP.
                        logrec = address[0] + _ + rx
                        # Reformat to final log file format.
                        (rc, rm, newrec) = reformatLogrec(logrec)
                        # Queue to log file writing thread.
                        if rc:
                            LFQ.put(newrec)
                            tx = b'OK'
                        else:
                            # Instead of an 'OK', echo the bad record.
                            tx = 'E: ' + rm  # The squawk from xlog.
                            z = ':: ' + logrec  # The offending logrec.
                            _sl.error(tx)#$#
                            _sl.error(z)#$#
                            tx = tx.encode(encoding=ENCODING, errors=ERRORS)
                    # Respond to sender.
                    if tx:
                        skt.sendall(tx + b'\n')
                else:
                    # Connection EOF.
                    _sl.info('no more rx')
                    break
        _sl.info('skt.makefile done')
    except EOFError:
        _m.beeps(1)
        msg = 'client socket to {} has closed'.format(address)
        _sl.warning(msg)
        pass  # POR.
    except ConnectionResetError as E:
        _m.beeps(1)  # Usually not serious.
        msg = 'client {} closed connection'.format(address)
        _sl.info(msg)
        pass  # POR.
    except ConnectionAbortedError as E:
        _m.beeps(2)  # Perhaps a little more serious.
        msg = 'client {} aborted connection'.format(address)
        _sl.info(msg)
        pass  # POR.
    except Exception as E:
        _m.beeps(3)
        errmsg = 'client {} error: {} @ {}'.format(address, E, _m.tblineno())
        _sl.error(errmsg)
        pass  # POR.
    finally:
        # NOTE(review): logged count is taken before the decrement, so it
        # includes this connection -- kept as original behavior.
        msg = 'handle_connection: close -> %d open' % NOCX
        _sl.info(msg)
        NOCX -= 1
        try:
            skt.close()
        except Exception:
            pass
def logFileThread(): """Consume LFQ, writing to the log file.""" # Additionally, when VERBOSE, write to screen. # When VERBOSE, the log file can be null. global CHK_UTC_TS, LOG_PFN, LFTSTOPPED me = 'LFT' # No thread-local vars bcs only one thread. _sl.extra(me + ' begins') try: rplrc = 0 # Records per log roll check. wlpfn = None # Working version of LOG_PFN. while True: if LFTSTOP: _sl.extra('STOPping')#$# LFTSTOPPED = True log_close() return try: # Get a logrec from the input queue, with a 1-sec timeout. logrec = LFQ.get(block=True, timeout=1).rstrip() # Strip the '\n'. # Log file? (Via LOG_PATH. Can be null when VERBOSE.) if LOG_PATH: # Log roll check every 1-sec. if UTC_TS > (CHK_UTC_TS + 1): CHK_UTC_TS = UTC_TS # Were there records in the previous 1-sec? if rplrc > 0: # Flush. LOG_FILE.flush() os.fsync(LOG_FILE.fileno()) # Dots? if not VERBOSE: _sw.iw('.') rplrc = 0 # Log roll? wlpfn = current_log_pfn() if wlpfn != LOG_PFN: log_close() LOG_PFN = wlpfn # Ensure LOG_FILE. if not LOG_FILE: log_open() # Output. if LOG_FILE: LOG_FILE.write(logrec + '\n') rplrc += 1 else: errmsg = '!!!' # !TODO! print('** no LOG_FILE:', LOG_PFN)#$# # No log file? else: if not VERBOSE: raise ValueError('no LOG_FILE from: ' + LOG_PFN) # VERBOSE? (Custom output to screen.) if VERBOSE: try: a = logrec.split(_, 9) ffv = a.pop(0) b = json.loads(a.pop(-1)) b['sl'] = _sl ### id, si, el, sl, msg = b['_id'], b['_si'], b['_el'], b['_sl'], b.get('_msg', 'None') ### VM.main(*a, **b) except Exception as E: errmsg = str(E) # In case _sl is incapacitated... _m.beeps(3) print('!! ' + logrec + ' !! ' + errmsg + ' !!') except queue.Empty: pass except Exception as E: errmsg = '%s: %s @ %s' % (me, E, _m.tblineno()) + '\n' + \ '%s: LR: %s' % (me, repr(logrec)) DOSQUAWK(errmsg) raise finally: _sl.extra(me + ' ends')