def sep_(sl, local, name, width, prefix, fillchar):
    """Make separator line for sepBegin, sepEnd."""
    # sl Falsey -> separator line is returned but not logged.
    if local:
        ts = _dt.locut(_dt.utcut())
        tz = 'L'
    else:
        ts = _dt.utcut()
        tz = 'U'
    # Timestamp prefix, tagged 'L'ocal or 'U'tc via the iso separator.
    line = '{}{} '.format(prefix, _dt.ut2iso(ts, sep=tz))
    if name:
        line += '{} '.format(name)
    # Pad out to the requested width with fillchar.
    pad = width - len(line)
    if pad > 0:
        line += fillchar * pad
    if sl:
        sl._log(line)
    return line
def ownHeartbeat(uu=None):
    """Build and return one tab-separated heartbeat log record.

    uu: Unix UTC timestamp; defaults to "now" when None.
    Returns the record string; squawks and re-raises on failure.
    """
    me = 'ownHeartbeat'
    try:
        if uu is None:
            uu = _dt.utcut()
        ul = _dt.locut(uu)
        uuts = '%15.4f' % uu            # 15.4, unblanked fraction.
        uuiosfs = _dt.ut2isofs(uu)
        uliosfs = _dt.ut2isofs(ul)
        rxts = txts = uuts              # Self-received: rx ts == tx ts.
        kvs = {'_id': SRCID, '_si': SUBID, '_el': HB_EL, '_sl': HB_SL,
               '_ip': None, '_ts': uuts,
               'dt_loc': uliosfs, 'dt_utc': uuiosfs}
        # Canonical JSON (sorted keys, ascii) so the SHA1 is reproducible.
        kvsa = json.dumps(kvs, ensure_ascii=True, sort_keys=True)
        kvsab = kvsa.encode(encoding=ENCODING, errors=ERRORS)
        h = hashlib.sha1()
        h.update(kvsab)
        sha1x = h.hexdigest()
        _ = '\t'
        logrec = '%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s' % \
                 ('1', _, rxts, _, txts, _, SRCID, _, SUBID, _,
                  HB_EL, _, HB_SL, _, sha1x, _, kvsa)
        # BUGFIX: return moved here from a 'finally' clause.  The original
        # 'finally: return logrec' swallowed the 'raise' below, so failures
        # silently returned None instead of propagating as the explicit
        # re-raise (and every sibling function's error path) intends.
        return logrec
    except Exception as E:
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
def update_ts(utcut=None):
    """Refresh the module-level timestamp globals.

    utcut: Unix UTC timestamp (seconds); when None, the current time
    from _dt.utcut() is used.  Sets UTC_TS / UTC_UT / UTC_TS_STR and
    the YYMMDD / HHMMSS strings for both UTC and local time.
    Squawks and re-raises on failure.
    """
    global UTC_TS, UTC_UT, UTC_TS_STR, UTC_YMD, UTC_HMS, LOC_YMD, LOC_HMS
    me = 'update_ts(%r)' % utcut
    try:
        # BUGFIX: compare against None instead of truthiness so that
        # utcut == 0 (the epoch) is honored rather than replaced by "now".
        if utcut is not None:
            UTC_TS = utcut
        else:
            UTC_TS = _dt.utcut()
        UTC_TS_STR = '{:15.4f}'.format(UTC_TS)
        UTC_UT = int(UTC_TS)            # Truncate to integer.
        utc = time.gmtime(UTC_UT)
        loc = time.localtime(UTC_UT)
        UTC_YMD = '%02d%02d%02d' % (utc.tm_year % 100, utc.tm_mon, utc.tm_mday)
        UTC_HMS = '%02d%02d%02d' % (utc.tm_hour, utc.tm_min, utc.tm_sec)
        LOC_YMD = '%02d%02d%02d' % (loc.tm_year % 100, loc.tm_mon, loc.tm_mday)
        LOC_HMS = '%02d%02d%02d' % (loc.tm_hour, loc.tm_min, loc.tm_sec)
    except Exception as E:
        # BUGFIX: was 'm.tblineno()' (NameError in the handler); every
        # sibling function in this file uses '_m.tblineno()'.
        errmsg = '{}: {} @ {}'.format(me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
def watcherThread():
    """A thread to watch WPATH for files to process.

    Main loop, once per INTERVAL seconds until FWTSTOP is set (or after
    a single pass when ONECHECK is set):
      1. optionally emit/queue a heartbeat record,
      2. scan for logfiles (getFIs) and refresh the flat-file-watch
         database (FFWDB),
      3. export the oldest unfinished file,
      4. optionally move the oldest *finished* file aside (DONESD),
         provided it is not the only -- and therefore "live" -- file.
    Communicates via the module globals declared below; assumes XLOGDB
    is already configured.
    """
    global LOADRECS, FFWDB, FWTRUNNING, FWTSTOP, FWTSTOPPED
    LOADRECS = []
    me = 'watcher thread'
    try:
        FWTRUNNING = True
        assert XLOGDB, 'no XLOGDB'
        # Connect to FlatFileWatchDataBase.
        FFWDB = ffwdb.FFWDB(FFWDBPFN)
        assert FFWDB, 'no FFWDB'
        uu = 0                          # Unix Utc.
        while not FWTSTOP:
            # flushHeartbeats()
            # Wait out INTERVAL.  uu is the previous pass' acquisition
            # time, so processing time is charged against the interval.
            z = time.time()
            w = INTERVAL - (z - uu)
            if w > 0:
                _sw.wait(w)
            uu = _dt.utcut()
            ul = _dt.locut(uu)
            uuts = '%15.4f' % uu        # 15.4, unblanked fraction.
            uuiosfs = _dt.ut2isofs(uu)
            uliosfs = _dt.ut2isofs(ul)
            # Heartbeat?
            if OWNHEARTBEAT:
                logrec = ownHeartbeat(uu)
                addHeartbeat(logrec)
            # Files?
            t0 = time.perf_counter()
            fis = getFIs(uu)
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('   getFIs: {:9,.1f} ms'.format((1000*(t1-t0))))
            if not fis:
                errmsg = 'no logfiles @ ' + uliosfs
                raise Exception(errmsg)
            # Update FFWDB.
            # Freshen the "acquired" timestamp.
            # Delete entries for nonexistent files.
            t0 = time.perf_counter()
            filenames = [fi['filename'] for fi in fis]
            filenames.sort()
            # Compare real (nf) and db (nx) counts.
            nf = len(filenames)
            nx = FFWDB.count()
            # NOTE(review): the three branches below are identical no-ops;
            # this looks like a leftover debugging stub (somewhere to set a
            # breakpoint per case) -- confirm intent before removing.
            if nf < nx:
                nf, nx = nf, nx
            elif nf > nx:
                nf, nx = nf, nx
            else:
                nf, nx = nf, nx
            FFWDB.acquired(filenames, uu)
            for fi in fis:
                z = updateDB(fi)        # NOTE(review): z is never used.
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('updateDBs: {:9,.1f} ms'.format((1000*(t1-t0))))
            # Find the oldest, newest unfinished files in DB.
            t0 = time.perf_counter()
            o_dbfi, n_dbfi = FFWDB.oldestnewest('u')
            t1 = time.perf_counter()
            if TIMINGS:
                _sl.warning('   oldest: {:9,.1f} ms'.format((1000*(t1-t0))))
            # Something unfinished?
            if o_dbfi:
                fn = o_dbfi['filename']
                # "historical" <=> not the newest (i.e., not the live) file.
                historical = (fn != n_dbfi['filename'])
                # Export the file.
                exportFile(historical, o_dbfi)  # !CHANGE!
            # Move oldest finished file?
            # It must not be the only, and therefore "live", file.
            if DONESD:
                o_dbfi, n_dbfi = FFWDB.oldestnewest('f')
                if o_dbfi and (n_dbfi['filename'] != o_dbfi['filename']):
                    t0 = time.perf_counter()
                    doneWithFile(o_dbfi['filename'])
                    t1 = time.perf_counter()
                    if TIMINGS:
                        _sl.warning('    moved: {:9,.1f} ms'.format((1000*(t1-t0))))
            # Single-pass (test/debug) mode: request our own stop.
            if ONECHECK:
                FWTSTOP = True
    except KeyboardInterrupt as E:
        _m.beeps(1)
        # watcherThread:
        msg = '1: {}: KeyboardInterrupt: {}'.format(me, E)
        _sl.warning(msg)
        ###---DOSQUAWK(errmsg, beeps=1)
        pass###raise  # Let the thread exit. Avoids "Exception in thread...".
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Flush heartbeats and loadrecs.
        flushHeartbeats()
        loadrecs2db()
        if FWTSTOP:
            FWTSTOPPED = True
        FFWDB.disconnect()
        _sl.info('%s exits. STOPPED: %s' % (me, str(FWTSTOPPED)))
        FWTRUNNING = False
testfn = '151213-01.log' # 174,586 bytes. testfn = '151213-02.log' # 48,894 bytes. historical = False testfi = FFWDB.select(testfn) ###!!!testfi['processed'] = 0 # 1/2: Optionally append stuff to logfile to # simulate intraprocess mofification. # 151213-02.log -> 50,262 bytes. # Doing this here causes no export bcs testfi # shows the file a fully processed. if False: # Update FFWDB and reget testfi. uu = _dt.utcut() fis = getFIs(uu) filenames = [fi['filename'] for fi in fis] filenames.sort() FFWDB.acquired(filenames, uu) for fi in fis: if fi['filename'] == testfn: testfi = updateDB(fi) exportFile(historical, testfi) shutDown() 1/1 # Test 2.