def shutdown():
    """Build the standard post-main shutdown message for an app.

    Returns a two-line string:
        "<prog> ends @ <local time>"
        "## <<local time>> ====...====" (50 '=' separator)
    """
    me = argsMe()  # Program/script identifier (project helper).
    # Take the timestamp once so both lines report the same instant
    # (the original called dt.locut() twice and could disagree).
    ts = dt.ut2isofs(dt.locut())
    msg = '%s ends @ %s' % (me, ts)
    msg += '\n'
    msg += '## <%s> %s' % (ts, 50 * '=')
    return msg
def startup():
    """Build the standard pre-maininits() startup message for an app.

    Returns (me, msg): the program identifier, and a two-line string:
        "## <<local time>> ----...----" (50 '-' separator)
        "<prog> begins @ <local time>"
    """
    me = argsMe()  # Program/script identifier (project helper).
    # Take the timestamp once so both lines report the same instant
    # (the original called dt.locut() twice and could disagree).
    ts = dt.ut2isofs(dt.locut())
    msg = '## <%s> %s' % (ts, 50 * '-')
    msg += '\n'
    msg += '%s begins @ %s' % (me, ts)
    return me, msg
def ownHeartbeat(uu=None):
    """Build a tab-separated heartbeat log record for this process.

    uu : UTC unix time for the heartbeat; defaults to "now" (_dt.utcut()).
    Returns the record string, or None when building it failed.

    NOTE(review): the `return` in `finally` suppresses the `raise` in the
    except clause, so callers always receive a value (None on error) —
    preserved here as apparently intentional; confirm.
    """
    me = 'ownHeartbeat'
    logrec = None
    try:
        if uu is None:
            uu = _dt.utcut()
        ul = _dt.locut(uu)  # Local-time counterpart of uu.
        uuts = '%15.4f' % uu  # 15.4, unblanked fraction.
        payload = {
            '_id': SRCID,
            '_si': SUBID,
            '_el': HB_EL,
            '_sl': HB_SL,
            '_ip': None,
            '_ts': uuts,
            'dt_loc': _dt.ut2isofs(ul),
            'dt_utc': _dt.ut2isofs(uu),
        }
        kvsa = json.dumps(payload, ensure_ascii=True, sort_keys=True)
        digest = hashlib.sha1(kvsa.encode(encoding=ENCODING, errors=ERRORS)).hexdigest()
        # rx and tx timestamps are both the heartbeat's own timestamp.
        fields = ('1', uuts, uuts, SRCID, SUBID, HB_EL, HB_SL, digest, kvsa)
        logrec = '\t'.join('%s' % field for field in fields)
    except Exception as E:
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        return logrec
def sep_(sl, local, name, width, prefix, fillchar):
    """Build (and optionally log) a separator line for sepBegin, sepEnd.

    sl       : logger-like object; falsey -> line is returned but not logged.
    local    : truthy -> local time with 'L' date/time separator, else UTC/'U'.
    name     : optional label appended after the timestamp.
    width    : right-pad the line with fillchar up to this width.
    prefix   : text placed before the timestamp.
    """
    if local:
        ts = _dt.locut(_dt.utcut())
        tsep = 'L'
    else:
        ts = _dt.utcut()
        tsep = 'U'
    line = '{}{} '.format(prefix, _dt.ut2iso(ts, sep=tsep))
    if name:
        line += '{} '.format(name)
    pad = width - len(line)
    if pad > 0:
        line += pad * fillchar
    if sl:
        sl._log(line)
    return line
def watcherThread():
    """A thread to watch WPATH for files to process.

    Loop (every INTERVAL seconds) until FWTSTOP is set (or one pass when
    ONECHECK): optionally emit a heartbeat, scan for logfiles, refresh the
    flat-file-watch DB (FFWDB), export the oldest unfinished file, and
    optionally move the oldest finished file to DONESD.

    Fixes vs original: removed the dead no-op `if nf < nx: nf, nx = nf, nx
    / elif / else` debug stub, the unused locals it fed (`uuts`, `uuiosfs`),
    and the unused `z = updateDB(fi)` result binding.
    """
    global LOADRECS, FFWDB, FWTRUNNING, FWTSTOP, FWTSTOPPED
    LOADRECS = []
    me = 'watcher thread'
    try:
        FWTRUNNING = True
        assert XLOGDB, 'no XLOGDB'
        # Connect to FlatFileWatchDataBase.
        FFWDB = ffwdb.FFWDB(FFWDBPFN)
        assert FFWDB, 'no FFWDB'
        uu = 0  # Unix UTC of previous pass; 0 -> no wait on first pass.
        while not FWTSTOP:
            # Wait out the remainder of INTERVAL since the last pass.
            z = time.time()
            w = INTERVAL - (z - uu)
            if w > 0:
                _sw.wait(w)
            uu = _dt.utcut()
            ul = _dt.locut(uu)
            uliosfs = _dt.ut2isofs(ul)
            # Heartbeat?
            if OWNHEARTBEAT:
                addHeartbeat(ownHeartbeat(uu))
            # Files?
            t0 = time.perf_counter()
            fis = getFIs(uu)
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('   getFIs: {:9,.1f} ms'.format((1000 * (t1 - t0))))
            if not fis:
                errmsg = 'no logfiles @ ' + uliosfs
                raise Exception(errmsg)
            # Update FFWDB:
            #   - Freshen the "acquired" timestamp.
            #   - Delete entries for nonexistent files.
            t0 = time.perf_counter()
            filenames = sorted(fi['filename'] for fi in fis)
            FFWDB.acquired(filenames, uu)
            for fi in fis:
                updateDB(fi)
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('updateDBs: {:9,.1f} ms'.format((1000 * (t1 - t0))))
            # Find the oldest, newest unfinished files in DB.
            t0 = time.perf_counter()
            o_dbfi, n_dbfi = FFWDB.oldestnewest('u')
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('   oldest: {:9,.1f} ms'.format((1000 * (t1 - t0))))
            # Something unfinished?
            if o_dbfi:
                fn = o_dbfi['filename']
                # "historical" unless it is also the newest ("live") file.
                historical = (fn != n_dbfi['filename'])
                # Export the file.
                exportFile(historical, o_dbfi)  # !CHANGE!
            # Move oldest finished file?
            # It must not be the only, and therefore "live", file.
            if DONESD:
                o_dbfi, n_dbfi = FFWDB.oldestnewest('f')
                if o_dbfi and (n_dbfi['filename'] != o_dbfi['filename']):
                    t0 = time.perf_counter()
                    doneWithFile(o_dbfi['filename'])
                    t1 = time.perf_counter()
                    if TIMINGS:
                        _sl.warning('    moved: {:9,.1f} ms'.format((1000 * (t1 - t0))))
            if ONECHECK:
                FWTSTOP = True
    except KeyboardInterrupt as E:
        _m.beeps(1)  # watcherThread:
        msg = '1: {}: KeyboardInterrupt: {}'.format(me, E)
        _sl.warning(msg)
        # Deliberately swallowed: let the thread exit quietly.
        # Avoids "Exception in thread...".
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
        if FWTSTOP:
            FWTSTOPPED = True
        FFWDB.disconnect()
        _sl.info('%s exits. STOPPED: %s' % (me, str(FWTSTOPPED)))
        FWTRUNNING = False
def exportFile(historical, xfi):
    """Export a file (from info dict).

    historical : True when the file is not the newest ("live") file.
    xfi        : FFWDB info dict with 'filename', 'processed', 'size'.

    Reads log records from WPATH/<filename> (whole file for .gz; from the
    'processed' byte offset for plain text) and feeds each record to
    logrec2loadrecs(). On success (and not TESTONLY) advances the DB's
    'processed' byte count to the observed size.

    Fixes vs original: gzip.open() used mode 'r' (binary) with encoding/
    errors arguments, which raises ValueError in Python 3 — must be 'rt';
    the finally clause said `f.close` (missing parentheses, a no-op) and
    could reference `f` before assignment.
    """
    fn = xfi['filename']
    me = 'exportFile(%s, %s)' % (str(historical), fn)
    _sl.info('%s %s %s' % (_dt.ut2iso(_dt.locut()), fn, 'h' if historical else ''))#$#
    nb2e = 0  # Bytes to export. Finally references; <=0 suppresses DB update.
    f = None  # So finally can safely attempt a close.
    try:
        # Safety flush.
        loadrecs2db()
        # How many bytes of file is to be exported?
        fskip = xfi['processed']
        fsize = xfi['size']
        nb2e = fsize - fskip
        if nb2e <= 0:
            return
        # Still exists?
        pfn = os.path.normpath(WPATH + '/' + fn)
        if not os.path.isfile(pfn):
            _m.beeps(1)
            errmsg = '%s: file dne' % me
            _sl.warning(errmsg)
            return
        # .gz files are always treated as historical, and
        # the whole file is read. (No seek!)
        if pfn.endswith('.gz'):
            # BUG FIX: text mode must be 'rt' — gzip.open() rejects
            # encoding/errors arguments in binary mode ('r').
            with gzip.open(pfn, 'rt', encoding=ENCODING, errors=ERRORS) as f:
                for x, logrec in enumerate(f):
                    if not (x % 1000):
                        _sw.iw('.')  # Progress dot per 1000 records.
                    logrec2loadrecs(logrec)
            return
        # Uncompressed files are read as text, with an initial
        # seek from the SOF. The file is read to its end, even
        # if this goes beyond the size given, which will
        # happen if XLOG appends to this file while we're
        # processing it. This will result in the appendage
        # being reprocessed next time around, but this is
        # harmless bcs logrec and heartbeat processing is
        # able to handle (skip) duplicates.
        with open(pfn, 'r', encoding=ENCODING, errors=ERRORS) as f:
            if fskip > 0:
                _sl.info('skipping {:,d} bytes'.format(fskip))
                f.seek(fskip)
            for x, logrec in enumerate(f):
                if not (x % 1000):
                    _sw.iw('.')
                logrec2loadrecs(logrec)
    except Exception as E:
        nb2e *= -1  # Prevent 'processed' update.
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # End dots.
        _sw.nl()
        # Close src file (normally already closed by the with-blocks).
        try:
            if f:
                f.close()
        except Exception:
            pass
        # Update 'processed'?
        if nb2e > 0 and not TESTONLY:
            xfi['processed'] = fsize
            z = {'filename': xfi['filename'], 'processed': xfi['processed']}
            FFWDB.update(z)
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
def xlog():
    # Main entry: log the configuration, start the log-file writer thread
    # (LFT), inject a fake "begins" record, run a ThreadedServer until
    # XLOGSTOP is set, then (in finally) inject a fake "ends" record, wait
    # for LFQ to drain, stop LFT, and close LOG_FILE.
    # NOTE(review): `_` used in the fake-record formats below appears to be
    # a module-level field separator (tab?) defined elsewhere — confirm.
    global LFTSTOP
    me, action = 'main', ''
    try:
        _sl.info(me + ' begins')#$#
        _sl.info()
        # Echo the effective configuration (module-level settings).
        _sl.info(' host: ' + HOST)
        _sl.info(' ippfx: ' + IPPFX)
        _sl.info(' port: ' + str(PORT))
        _sl.info(' hp: ' + str(HP))
        _sl.info(' log_path: ' + LOG_PATH)
        _sl.info(' verbose: ' + str(VERBOSE))
        if VERBOSE:
            # NOTE(review): grouping inferred from collapsed source —
            # viewer/vm details assumed to be verbose-only output; confirm.
            _sl.info(' viewer: ' + str(VIEWER))
            _sl.info(' vm: ' + repr(VM))
        _sl.info()
        startLogFileThread()
        # Fake a log record from self. Fake the json'd dict. Use '0.0.0.0' as self.
        z = '%s%s{"_id": "%s", "_si": "%s", "_el": %d, "_sl": "%s", "_msg": "%s"}' %\
            ('0.0.0.0', _, '----', '----', 0, '_', (me + ' begins @ %s' % _dt.ut2iso(_dt.locut())))
        (rc, rm, newrec) = reformatLogrec(z)
        LFQ.put(newrec)
        _sl.info('starting server on %r' % (HP, ))
        server = ThreadedServer(HP, Handler)
        server.daemon_threads = True  # !!! Crucial! (Otherwise responder threads never exit.)
        ip, port = server.server_address
        # Serve in a daemon thread; this thread just polls for XLOGSTOP.
        server_thread = threading.Thread(target=server.serve_forever)
        server_thread.daemon = True
        server_thread.start()
        _sl.info('server running in %s' % server_thread.name)
        while True:
            if XLOGSTOP:
                break
            time.sleep(1)
        _sl.warning('server shutdown')
        server.shutdown()
        _sl.warning('server close')
        server.server_close()
    except KeyboardInterrupt as E:
        errmsg = '{}: KeyboardInterrupt: {}'.format(me, E)
        DOSQUAWK(errmsg, beeps=1)
        pass###raise
    except Exception as E:
        errmsg = '{}: E: {} @ {}'.format(me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Either: a '!STOP!' record, a KeyboardInterrupt, or an Exception.
        if LFT:
            # Fake a received log record.
            z = '%s%s{"_id": "%s", "_el": %d, "_sl": "%s", "_msg": "%s"}' %\
                ('0.0.0.0', _, '----', 0, '_', (me + ' ends @ %s' % _dt.ut2iso(_dt.locut())))
            (rc, rm, newrec) = reformatLogrec(z)
            LFQ.put(newrec)
            # Wait some to let LFT empty tis queue.
            # Polls LFQ.empty() every 0.1 s for up to 10 s.
            tw, w, mt = 0, 0.1, False
            while tw < 10:
                time.sleep(w)
                tw += w
                if LFQ.empty():
                    mt = True
                    break
            if not mt:
                errmsg = 'LFT did not empty its queue'
                _sl.error(errmsg)
            # Shut down log file writing thread.
            LFTSTOP = True
            try:
                LFT.join(10)
                if LFTSTOPPED:
                    pass  # LFT acknowledged the stop request.
                else:
                    errmsg = 'LFT didn\'t acknowledge STOP request'
                    _sl.error(errmsg)
            except Exception as E:
                errmsg = 'LFT.join(10): %s' % E
                DOSQUAWK(errmsg)
                raise
            # Best-effort close; LOG_FILE may already be closed/unset.
            try:
                LOG_FILE.close()
            except:
                pass
        _sl.info(me + ' ends')#$#