def sep_(sl, local, name, width, prefix, fillchar):
    """Make separator line for sepBegin, sepEnd."""
    # sl Falsey -> separator line is returned but not logged.
    if local:
        ts = _dt.locut(_dt.utcut())
        tz_flag = 'L'
    else:
        ts = _dt.utcut()
        tz_flag = 'U'
    # Prefix + ISO timestamp (tz_flag marks local vs UTC), then optional name.
    line = '{}{} '.format(prefix, _dt.ut2iso(ts, sep=tz_flag))
    if name:
        line += '{} '.format(name)
    # Pad out to the requested width with the fill character.
    pad = width - len(line)
    if pad > 0:
        line += fillchar * pad
    if sl:
        sl._log(line)
    return line
def exportFile(historical, xfi):
    """Export a file (from info dict).

    xfi is a file-info dict with at least 'filename', 'size' and
    'processed' (bytes already exported).  Reads the unprocessed tail
    of the file (or the whole file for .gz), feeding each line to
    logrec2loadrecs().  On success (and not TESTONLY) the 'processed'
    watermark is advanced in FFWDB.
    """
    fn = xfi['filename']
    me = 'exportFile(%s, %s)' % (str(historical), fn)
    _sl.info('%s %s %s' % (_dt.ut2iso(_dt.locut()), fn, 'h' if historical else ''))#$#
    nb2e = 0                        # Bytes to export; finally references.
    f = None                        # So finally can safely reference it.
    try:
        # Safety flush.
        loadrecs2db()
        # How many bytes of file is to be exported?
        fskip = xfi['processed']
        fsize = xfi['size']
        nb2e = fsize - fskip
        if nb2e <= 0:
            return
        # 2/2: Optionally append stuff to logfile to
        # simulate intraprocess mofification.
        # 151213-02.log -> 50,262 bytes -> 51,630 bytes.
        # Still exists?
        pfn = os.path.normpath(WPATH + '/' + fn)
        if not os.path.isfile(pfn):
            _m.beeps(1)
            errmsg = '%s: file dne' % me
            _sl.warning(errmsg)
            return
        # .gz files are always treated as historical, and
        # the whole file is read. (No seek!)
        if pfn.endswith('.gz'):
            # BUGFIX: mode must be 'rt' -- gzip.open('r') is binary and
            # rejects the encoding/errors arguments with a ValueError.
            with gzip.open(pfn, 'rt', encoding=ENCODING, errors=ERRORS) as f:
                for x, logrec in enumerate(f):
                    if not (x % 1000):
                        _sw.iw('.')
                    logrec2loadrecs(logrec)
            return
        # Uncompressed files are read as text, with an initial
        # seek from the SOF. The file is read to its end, even
        # if this goes beyond the size given, which will
        # happen if XLOG appends to this file while we're
        # processing it. This will result in the appendage
        # being reprocessed next time around, but this is
        # harmless bcs logrec and heartbeat processing is
        # able to handle (skip) duplicates.
        with open(pfn, 'r', encoding=ENCODING, errors=ERRORS) as f:
            if fskip > 0:
                _sl.info('skipping {:,d} bytes'.format(fskip))
                f.seek(fskip)
            for x, logrec in enumerate(f):
                if not (x % 1000):
                    _sw.iw('.')
                logrec2loadrecs(logrec)
    except Exception as E:
        nb2e *= -1                  # Prevent 'processed' update.
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # End dots.
        _sw.nl()
        # Close src file.  (Best-effort: the with-blocks already close
        # it; f may also be None if we returned early.)
        # BUGFIX: original said 'f.close' (attribute reference, no call).
        try:
            f.close()
        except:
            pass
        # Update 'processed'?  (nb2e is negated on exception, so a
        # failed export never advances the watermark.)
        if nb2e > 0 and not TESTONLY:
            xfi['processed'] = fsize
            z = {'filename': xfi['filename'], 'processed': xfi['processed']}
            FFWDB.update(z)
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
def xlog():
    """Main loop: start the log-file thread and the threaded log server,
    run until XLOGSTOP is set, then perform an orderly shutdown of the
    server and the log-file writer thread (LFT)."""
    global LFTSTOP
    me, action = 'main', ''
    try:
        _sl.info(me + ' begins')#$#
        _sl.info()
        # Startup banner: effective configuration.
        _sl.info(' host: ' + HOST)
        _sl.info(' ippfx: ' + IPPFX)
        _sl.info(' port: ' + str(PORT))
        _sl.info(' hp: ' + str(HP))
        _sl.info(' log_path: ' + LOG_PATH)
        _sl.info(' verbose: ' + str(VERBOSE))
        if VERBOSE:
            _sl.info(' viewer: ' + str(VIEWER))
            _sl.info(' vm: ' + repr(VM))
        _sl.info()
        startLogFileThread()
        # Fake a log record from self. Fake the json'd dict. Use '0.0.0.0' as self.
        # NOTE(review): '_' here is presumably a module-level separator
        # string used between the IP and the JSON payload — confirm.
        z = '%s%s{"_id": "%s", "_si": "%s", "_el": %d, "_sl": "%s", "_msg": "%s"}' %\
            ('0.0.0.0', _, '----', '----', 0, '_', (me + ' begins @ %s' % _dt.ut2iso(_dt.locut())))
        (rc, rm, newrec) = reformatLogrec(z)
        LFQ.put(newrec)
        _sl.info('starting server on %r' % (HP, ))
        server = ThreadedServer(HP, Handler)
        server.daemon_threads = True  # !!! Crucial! (Otherwise responder threads never exit.)
        ip, port = server.server_address
        server_thread = threading.Thread(target=server.serve_forever)
        server_thread.daemon = True
        server_thread.start()
        _sl.info('server running in %s' % server_thread.name)
        # Idle until something (a '!STOP!' record) sets XLOGSTOP.
        while True:
            if XLOGSTOP:
                break
            time.sleep(1)
        _sl.warning('server shutdown')
        server.shutdown()
        _sl.warning('server close')
        server.server_close()
    except KeyboardInterrupt as E:
        errmsg = '{}: KeyboardInterrupt: {}'.format(me, E)
        DOSQUAWK(errmsg, beeps=1)
        # Deliberately NOT re-raised: Ctrl-C falls through to the
        # orderly shutdown in finally.
        pass###raise
    except Exception as E:
        errmsg = '{}: E: {} @ {}'.format(me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Either: a '!STOP!' record, a KeyboardInterrupt, or an Exception.
        if LFT:
            # Fake a received log record.
            z = '%s%s{"_id": "%s", "_el": %d, "_sl": "%s", "_msg": "%s"}' %\
                ('0.0.0.0', _, '----', 0, '_', (me + ' ends @ %s' % _dt.ut2iso(_dt.locut())))
            (rc, rm, newrec) = reformatLogrec(z)
            LFQ.put(newrec)
            # Wait some to let LFT empty its queue.
            # Poll up to ~10s in 0.1s steps; mt flags "queue emptied".
            tw, w, mt = 0, 0.1, False
            while tw < 10:
                time.sleep(w)
                tw += w
                if LFQ.empty():
                    mt = True
                    break
            if not mt:
                errmsg = 'LFT did not empty its queue'
                _sl.error(errmsg)
            # Shut down log file writing thread.
            LFTSTOP = True
            try:
                LFT.join(10)
                # LFTSTOPPED is set by LFT when it sees LFTSTOP.
                if LFTSTOPPED:
                    pass
                else:
                    errmsg = 'LFT didn\'t acknowledge STOP request'
                    _sl.error(errmsg)
            except Exception as E:
                errmsg = 'LFT.join(10): %s' % E
                DOSQUAWK(errmsg)
                raise
            # Best-effort close of the log file itself.
            try:
                LOG_FILE.close()
            except:
                pass
        _sl.info(me + ' ends')#$#