def testS2E(s2e):
    # Self-test over a captured s2e string: logs its size, whether it ends in
    # a newline, and iterates its logrecs. Returns True when ne == 0.
    # NOTE(review): reconstructed from a collapsed one-line source; the two
    # 'return ###!!!' markers appear to deliberately disable this test.
    if not (TEST and s2e):
        return  ###!!!
    return  ###!!!  # NOTE(review): unconditional early return — the code below is dead; confirm intentional.
    me = 'testS2E'
    _sl.info()
    _sl.info('{:s}: {:,d} bytes'.format(me, len(s2e)))
    ne = 0  # error count; never incremented in the visible code
    try:
        if s2e[-1] == '\n':
            _sl.info('ends with \\n')
        else:
            _sl.info('does not end with \\n')
        for x, logrec in enumerate(s2e.split('\n')):
            if not logrec:
                continue
            pass
        pass
    except Exception as E:
        rc = False  # NOTE(review): rc is assigned but never used/returned
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        pass
    finally:
        # NOTE(review): return-in-finally swallows any in-flight exception.
        return (ne == 0)
def argsRP(RP, trc=True):
    """Convert a repetition-period argument string (e.g. '5', '2m', '1H')
    to float seconds and return it.

    A trailing S/M/H (case-insensitive) selects the multiplier; a bare
    number defaults to seconds. Raises on an unrecognised suffix or a
    non-numeric value. FIX: removed an unterminated ''' commented-out
    block dangling after the function, which broke the file's syntax.
    """
    me, action, errmsg = 'argsRP', 'getting repetition period from: ' + repr(RP), ''
    try:
        if trc:
            SL.info(action)
        if not RP:
            # RP begins life as a string.
            # NOTE(review): this branch sets RP = 0 but falls through and
            # returns None, not 0 — confirm callers treat both the same.
            RP = 0
        else:
            s = RP[-1].upper()
            if s.isdigit():
                # No unit suffix: default to seconds.
                s = 'S'
                RP += s
            if s == 'S':
                m = 1.0
            elif s == 'M':
                m = 60.0
            elif s == 'H':
                m = 3600.0
            else:
                errmsg = 'unrecognisable repetition period multiplier: ' + s
                raise Exception
                #!#raise PgmStop2(me, action, errmsg)
            s = RP[:-1]
            try:
                rp = float(s) * m
                RP = rp  # RP is now a float; seconds.
                msg = 'repetition period is %.1f secs' % (RP)
                if trc:
                    SL.info(msg)
                return RP
            except:
                errmsg = 'error converting: ' + s
                raise Exception
                #!#raise PgmStop2(me, action, errmsg)
        pass
    except Exception as E:
        errmsg = '{}: {}: {} @ {}'.format(me, action, errmsg if errmsg else E, mi.tblineno())
        SL.error(errmsg)
        raise
def argsBoolean(key, name, default=None, stop=False):
    """Fetch ARGSDICT[key] as a boolean via Str2Bool.

    Missing key: return *default* (or raise when *stop* is True).
    None or '' values also fall back to *default*. FIX: removed an
    unterminated ''' commented-out block dangling after the function,
    which broke the file's syntax.
    """
    me, errmsg = 'argsBoolean', ''
    try:
        r = ARGSDICT[key]
    except:
        if stop:
            errmsg = 'no "{}" boolean arg'.format(name)
            raise Exception
            #!#raise PgmStop2(me, action, errmsg)
        else:
            SL.warning('args: {}: {} (default)'.format(name, default))
            return default
    try:
        if r is None:
            # But not if ''.
            r = default
        if r == '':
            r = default  # 140524: !!! Added!
        if r:
            r = Str2Bool(r, default)
        SL.info('args: {}: {}'.format(name, r))
        return r
    except Exception as E:
        errmsg = '{}: {} @ {}'.format(me, errmsg if errmsg else E, mi.tblineno())
        SL.error(errmsg)
        raise
def addHeartbeat(logrec):
    # Stage a heartbeat logrec in the HEARTBEATS dict, keyed 'srcid|subid',
    # keeping only the record with the newest txts per key. Counts all beats
    # in NBEATS and superseded/old ones in NOLDBEATS. flushHeartbeats()
    # later writes the staged rows to the db.
    global HEARTBEATS, NBEATS, NOLDBEATS
    me = 'addHeartbeat(%s)' % repr(logrec)
    try:
        # Unpack logrec.
        (fv, rxts, txts, srcid, subid, el, sl, sha1, kvs) = logrec2fields(logrec)
        if fv is None:
            # Unparseable / non-record line: ignore.
            return
        assert ((el == HB_EL) and (sl == HB_SL)), 'bad hb el (%s) or sl (%s)' % (repr(el), repr(sl))
        assert txts, 'hb needs a txts'
        NBEATS += 1
        rxts2, txts2 = float(rxts), float(txts)
        # Check staging dict.
        k = srcid + '|' + subid
        v = HEARTBEATS.get(k)
        if v:
            if not (txts2 > float(v[1])):  # !MAGIC! Tuple index (v[1] is txts).
                # Same-or-older beat than what's already staged: drop it.
                NOLDBEATS += 1
                return
        # logrec is new or newer: update staging dict.
        v = [_S(rxts2), _S(txts2), _S(srcid), _S(subid), _S(el), _S(sl), _S(sha1), _S(kvs)]  # !!! Matches xlog/heartbeat table.
        HEARTBEATS[k] = v
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        pass
def logrec2loadrecs(logrec):
    """Convert logrec and add to loadrecs.

    Heartbeat records are diverted to addHeartbeat(); other records are
    appended to LOADRECS and flushed via loadrecs2db() once the batch
    reaches LOADCOMMITBATCHSIZE. FIX: 'me' was never defined, so the
    except handler itself raised NameError instead of reporting the error.
    """
    global LOADRECS
    me = 'logrec2loadrecs'  # FIX: previously undefined; except path crashed with NameError.
    try:
        try:
            logrec = logrec.rstrip()  # No \n.
        except:
            logrec = None
        if not logrec:
            return
        # Split logrec to fields.
        (fv, rxts, txts, srcid, subid, el, sl, sha1, kvs) = logrec2fields(logrec)
        if fv is None:
            if kvs:
                _sl.extra(kvs)  # Comment.
            return
        rxts2, txts2 = float(rxts), float(txts)
        # Heartbeat?
        if el == HB_EL and sl == HB_SL:
            addHeartbeat(logrec)
            return
        # Debug: no-op branches kept as breakpoint anchors.
        else:
            if logrec.find('"a"') > -1:
                logrec = logrec
            elif logrec.find('"e"') > -1:
                logrec = logrec
            else:
                logrec = logrec
        # Stash info for db loading (via batch commits). Field order:
        # (rxts, txts, srcid, subid, el, sl, sha1, kvs-json).
        z = [_S(rxts2), _S(txts2), _S(srcid), _S(subid), _S(el), _S(sl), _S(sha1), _S(kvs)]  # !!! Matches xlog table.
        LOADRECS.append(z)
        # Commit batch?
        if len(LOADRECS) >= LOADCOMMITBATCHSIZE:
            loadrecs2db()
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        pass
def doneWithFile(filename):
    """Move filename to DONESD."""
    # Moves WPATH/filename into the WPATH/DONESD subdirectory, tolerating
    # (but beeping and warning about) every failure mode. Only when the move
    # is fully verified does the finally clause delete the file's FFWDB row.
    me = 'doneWithFile(%s)' % repr(filename)
    _sl.info(me)
    moved = False  # Pessimistic.
    try:
        # Moving?
        if not DONESD:
            # No done-subdirectory configured: nothing to do.
            return
        # SRC, SNK.
        src = os.path.normpath(WPATH + '/' + filename)
        snk = os.path.normpath(WPATH + '/' + DONESD + '/' + filename)
        # No SRC, already SNK?
        if not os.path.isfile(src):
            _m.beeps(1)
            errmsg = 'src dne'
            _sl.warning(errmsg)
            return
        if os.path.isfile(snk):
            _m.beeps(1)
            errmsg = 'snk already in %s' % DONESD
            _sl.warning(errmsg)
            return
        # Do the move. A failure is squawked and tolerated.
        try:
            shutil.move(src, snk)
            moved = True
        except Exception as E:
            moved = False
            _m.beeps(3)
            errmsg = 'moving %s to %s failed: %s' % (filename, DONESD, E)
            _sl.warning(errmsg)
            pass  # POR.
        # Still SRC, no SNK? (verify the move actually happened)
        if os.path.isfile(src):
            moved = False
            _m.beeps(1)
            errmsg = 'src not moved'
            _sl.warning(errmsg)
            return
        if not os.path.isfile(snk):
            moved = False
            _m.beeps(1)
            errmsg = 'snk dne in %s' % DONESD
            _sl.warning(errmsg)
            return
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        if moved:
            # Forget the file only after a verified move.
            FFWDB.delete(filename)
def extra(self, extra=None):
    # Get or set the JSON 'extra' blob stored in the special logfiles row
    # with inode=-1. Called with extra=None it returns the stored dict ({} if
    # the row is absent); called with a dict it stores json.dumps(extra) and
    # returns the freshly re-read dict. Always commits.
    try:
        # Return db.extra?
        if extra is None:
            csr = self.db.cursor()
            csr.execute('select extra from logfiles where inode=-1')
            z = csr.fetchone()
            if not z:
                return {}
            rd = json.loads(z[0])
            return rd
        # Update db.extra?
        else:
            csr = self.db.cursor()
            csr.execute('select extra from logfiles where inode=-1')
            z = csr.fetchone()
            if not z:
                # Create the placeholder row; it is updated just below.
                csr.execute('insert into logfiles (inode, extra) values (?, ?)', (-1, ''))
            '''...
            csr.execute('select extra from logfiles where inode=-1')
            z = csr.fetchone()[0]
            ...'''
            z = json.dumps(extra)
            csr.execute('update logfiles set extra=? where inode=?', (z, -1))
            # Re-read via the getter so the caller sees what was stored.
            rd = self.extra()
            return rd
    except Exception as E:
        errmsg = 'FFWDB.extra: %s @ %s' % (E, tblineno())
        raise RuntimeError(errmsg)
    finally:
        self.db.commit()
def ownHeartbeat(uu=None):
    # Build this process's own heartbeat logrec: a tab-separated record
    # '1 \t rxts \t txts \t SRCID \t SUBID \t HB_EL \t HB_SL \t sha1 \t kvs-json'
    # where sha1 is the SHA-1 of the encoded kvs JSON. uu defaults to now (UTC).
    # Returns the logrec string, or None on error (the error is squawked; the
    # finally-return suppresses the re-raise).
    me = 'ownHeartbeat'
    logrec = None
    try:
        if uu is None:
            uu = _dt.utcut()
        ul = _dt.locut(uu)
        uuts = '%15.4f' % uu  # 15.4, unblanked fraction.
        uuiosfs = _dt.ut2isofs(uu)
        uliosfs = _dt.ut2isofs(ul)
        rxts = txts = uuts
        kvs = {'_id': SRCID, '_si': SUBID, '_el': HB_EL, '_sl': HB_SL, '_ip': None, '_ts': uuts, 'dt_loc': uliosfs, 'dt_utc': uuiosfs}
        # Deterministic JSON (sorted keys, ASCII) so the SHA-1 is stable.
        kvsa = json.dumps(kvs, ensure_ascii=True, sort_keys=True)
        kvsab = kvsa.encode(encoding=ENCODING, errors=ERRORS)
        h = hashlib.sha1()
        h.update(kvsab)
        sha1x = h.hexdigest()
        _ = '\t'  # field separator
        logrec = '%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s' %('1', _, rxts, _, txts, _, SRCID, _, SUBID, _, HB_EL, _, HB_SL, _, sha1x, _, kvsa)
    except Exception as E:
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        return logrec
def getFI(fn, ts):
    """Return a FileInfo dict for _fn."""
    # fn must match REFNPATTERN; the date/hour are sliced positionally from
    # the name ('yymmdd?hh...'). Stats the file under WPATH for size/mtime.
    # Returns the dict, or None on any failure (squawked; the finally-return
    # suppresses the re-raise).
    me = 'getFI(%s)' % repr(fn)
    fi = None
    try:
        if not REFNPATTERN.match(fn):
            errmsg = 'invalid filename: %s' % fn
            raise Exception(errmsg)
        yymmdd = fn[:6]
        ymd = '20' + yymmdd
        hh = fn[7:9]  # NOTE(review): skips fn[6] — presumably a separator char in the pattern; confirm.
        pfn = os.path.normpath(WPATH + '/' + fn)
        try:
            st = os.stat(pfn)
            size = st.st_size
            mtime = st.st_mtime
        except Exception as E:
            errmsg = 'stat(%s) failed' % pfn
            raise Exception(errmsg)
        fi = {'filename': fn, 'ymd': ymd, 'hh': hh, 'modified': mtime, 'size': size, 'acquired': ts}
    except Exception as E:
        fi = None  # Zap!
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        return fi
def connect(self):
    """Connect (if not rate-waiting) to hostport, and test.
    Return: -1: failed. 0: waiting. 1: successful."""
    try:
        # Already connected?
        if self.skt:
            self.cxstatus = 1
            return
        # Throttle connection rate: wait out the remainder of cxrate.
        self.cxwait = self.cxrate - (time.time() - self.cxts)
        if self.cxwait > 0:
            self.cxstatus = 0
            return
        # Try a connect.
        self.cxerrmsg = None
        self.cxts = time.time()
        self.skt = socket(AF_INET, SOCK_STREAM)
        self.skt.settimeout(self.cxtimeout)   # connect timeout
        self.skt.connect(self.hostport)
        self.skt.settimeout(self.txtimeout)   # then switch to transmit timeout
        # FIX: was 'self.csstatus = 1' (typo) — cxstatus stayed stale on a
        # successful connect, so the finally-return reported the wrong status.
        self.cxstatus = 1
        self.cxfails = 0
    except Exception as E:
        errmsg = 'connect: %s @ %s' % (E, _m.tblineno())
        self.cxerrmsg = errmsg
        # No logging here!
        self.disconnect()
        self.cxstatus = -1
        self.cxfails += 1
    finally:
        return self.cxstatus
def test():
    # Exercise l_xlog.XLog against a listening xlog server at (HOST, PORT),
    # sending COUNT numbered messages via the currently-enabled variant
    # (the if True/False switches select one of four send styles).
    # The bare '1/1' statements are no-op breakpoint anchors.
    me, action = 'TEST', ''
    try:
        _sl.info('test begins')
        _sl.info('(host, port): ' + repr((HOST, PORT)))
        if True:
            # Try numbered messages to a listening xlog.
            xl = l_xlog.XLog(xloghost=HOST, xlogport=PORT, xsrcid='_SRC', xsubid='_SUB', xlogel='0', xlogsl='_', sl=None, sw=_sw, txrate=0.1)###txrate=0.02)
            1/1
            for x in range(int(COUNT)):
                if False:
                    # 0: simple str message sent as xl.null()
                    msg = 'n%03d' % (x+1)
                    _sl.info('0: %s' % msg)
                    xl.null(msg, srcid=':%03d'%(101+x), subid='.%03d'%(201+x))
                    1/1
                if True:
                    # 1: simple str message sent via msg2xlog()
                    msg = 'n%03d' % (x+1)
                    _sl.info('1: %s' % msg)
                    xl.msg2xlog(msg, srcid=':%03d'%(101+x), subid='.%03d'%(201+x), el=1, sl=2)
                    1/1
                if False:
                    # 2: ready-to-use dict sent via logd2xlog()
                    logd = {'_id': ':%03d'%(301+x), '_si': '.%03d'%(401+x), '_el': 3, '_sl': 4, '_msg': 'n%03d' % (x+1), 'n': x+1}
                    xl.logd2xlog(logd)
                    1/1
                if False:
                    # 3: ready-to-use json'd dict sent via logdj2xlog()
                    logd = {'_id': ':%03d'%(501+x), '_si': '.%03d'%(601+x), '_el': 5, '_sl': 6, '_msg': 'n%03d' % (x+1), 'n': x+1}
                    logdj = json.dumps(logd, ensure_ascii=ENSURE_ASCII, sort_keys=SORT_KEYS)
                    xl.logdj2xlog(logdj)
                    1/1
                time.sleep(float(RATE))
                1/1
            # Drain the send queue before closing.
            while xl.busy():
                _m.beep()
                time.sleep(0.5)
            xl.close()
            1/1
        _sl.info('test ends')
        1/1
    except Exception as E:
        errmsg = 'test: {} @ {}'.format(str(E), _m.tblineno())
        _sl.error(errmsg)
        raise
    finally:
        1/1
def xlog2db():
    """Main entry: read args, open the sink db, run watcherThread until it
    stops (or Ctrl-C), then join the thread.

    FIX: watcher_thread is now predefined, so an exception raised before the
    thread is created no longer triggers a NameError in the finally clause
    (which used to mask the original error).
    """
    global SRCID, SUBID, WPATH, DONESD, INTERVAL
    global FFWDBPFN, FWTSTOP, FWTSTOPPED, XLOGDB
    me, action = 'xlog2db', ''
    watcher_thread = None  # FIX: predefine for the finally clause.
    try:
        _sl.info(me + ' begins')#$#
        SRCID = _a.ARGS['--srcid']
        SUBID = _a.ARGS['--subid']
        WPATH = _a.ARGS['--wpath'].rstrip('/').rstrip('/')
        DONESD = _a.ARGS['--donesd']
        INTERVAL = float(_a.ARGS['--interval'])
        DBCFG = _a.ARGS['--xlogdb']  # DB connection configuration, as a string.
        # NOTE(review): eval of a config string — acceptable only for trusted input.
        DBCFG = eval(DBCFG)  # ..., as a dict.
        _sl.info()
        _sl.info('   srcid: ' + SRCID)
        _sl.info('   subid: ' + SUBID)
        _sl.info('   wpath: ' + WPATH)
        _sl.info(' done sd: ' + DONESD)
        _sl.info('interval: ' + str(INTERVAL))
        _sl.info('  db cfg: ' + repr(DBCFG))
        _sl.info()
        # FFW DB PFN. DB creation must be done in watcherThread.
        FFWDBPFN = os.path.normpath(WPATH + '/xlog2db.s3')
        # Open sink db.
        XLOGDB = mc.connect(host=DBCFG['host'], user=DBCFG['user'], password=DBCFG['password'], db=DBCFG['database'], raise_on_warnings=True)
        # Start watcher() in a thread.
        watcher_thread = threading.Thread(target=watcherThread)
        watcher_thread.start()
        # Wait for startup.
        while not FWTRUNNING:
            time.sleep(0.010)
        # Wait for shutdown. Ctrl-c to stop & exit.
        while FWTRUNNING:
            time.sleep(1)
    except KeyboardInterrupt as E:
        _m.beeps(1)
        msg = '2: {}: KeyboardInterrupt: {}'.format(me, E)
        _sl.warning(msg)
        ###---DOSQUAWK(errmsg, beeps=1)
        pass###raise
    except Exception as E:
        errmsg = '{}: E: {} @ {}'.format(ME, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        if watcher_thread and FWTRUNNING:
            FWTSTOP = True
            watcher_thread.join(3 * INTERVAL)
            _sl.info('thread STOPPED: %s' % FWTSTOPPED)
def delete(self, inode):
    """Remove the logfiles row for *inode*.

    Wraps any db error in RuntimeError; always commits.
    """
    try:
        self.db.cursor().execute('delete from logfiles where inode=?', (inode, ))
    except Exception as E:
        raise RuntimeError('FFWDB.delete: %s @ %s' % (E, tblineno()))
    finally:
        self.db.commit()
def get_args(me=None, inipfn=None, useini=False):
    # ^^^ default to no ini file usage.
    """Build the global ARGS dict from an optional ini [args] section overlaid
    with command-line kvs; determine and return ME.

    FIX: the except handler built its message with "'{}...' % (...)" — the
    '%' operator on a str.format template raises TypeError and masked the
    real error; now uses .format(). The bare '1/1' statements are no-op
    breakpoint anchors, kept as-is.
    """
    global SYSARGVS, ARGS, INIPFN, INI, ME, VERSION
    1/1
    try:
        GLOBAL_INITS()
        # Hide/disable old version stuff.
        doc = version = None
        help = docopt = False
        clkvs = True
        # Determine ME.
        if me:
            ME = me
        else:
            (ME, _) = os.path.splitext(os.path.split(sys.argv[0])[1])
        # Get command line arguments, holding them aside.
        1/1
        get_sysargvs(docstr=doc, help=help, docopt=docopt, clkvs=clkvs)
        SYSARGVS = SYSARGVS  # (debug anchor)
        1/1
        # The presence of an inipfn turns on useini.
        if inipfn is not None:
            useini = True
        # Determine INI, if used.
        if useini:
            INIPFN = ME + '.ini'
            INIPFN = SYSARGVS.get('ini', INIPFN)
            INIPFN = inipfn or INIPFN
            get_ini()
            INI = INI  # (debug anchor)
            1/1
        # ARGS <- combine INI [args] and SYSARGVS (clkvs).
        1/1
        try:
            ARGS = dict([(k, True if v is None else v) for k, v in INI['args'].items()])
            # ^^^            ^^^ When ini kv has no = and no v.
        except:
            ARGS = {}
        ARGS = ARGS
        1/1
        ARGS.update(dict([(k, v) for k, v in SYSARGVS.items()]))
        ARGS = ARGS
        1/1
        return ME
    except Exception as E:
        1/1
        # FIX: was "'{}: {} @ {}' % (me, E, _m.tblineno())".
        errmsg = '{}: {} @ {}'.format(me, E, _m.tblineno())
        raise RuntimeError(errmsg)
    finally:
        1/1
def inodes(self):
    """Return a list of all positive inodes present in logfiles.

    Wraps any db error in RuntimeError; always commits.
    """
    try:
        cur = self.db.cursor()
        cur.execute('select inode from logfiles where inode>0')
        result = [row[0] for row in cur]
        return result
    except Exception as E:
        raise RuntimeError('FFWDB.inodes: %s @ %s' % (E, tblineno()))
    finally:
        self.db.commit()
def __init__(
    self,
    xloghost=None,
    xlogport=None,
    xsrcid=None,
    xsubid=None,
    xlogel="2",
    xlogsl="_",
    sl=None,
    sw=None,
    txrate=0.02,
    nop=False,
):
    """Set up an XLog sender.

    When xloghost and xlogport are given (and nop is False), opens an
    XLogTxRx connection, creates a bounded message queue and starts the
    background _xlogthread that drains it. Raises (after beeping and
    printing) if that setup fails.
    """
    me = "XLog.__init__"
    self.nop = nop  # Make a do-nothing logger.
    self.xlog = None  # Default no logging until host & port connected.
    self.xloghost = xloghost  # xlog server host IP.
    self.xlogport = xlogport  # xlog server port.
    # srcid, subid, el & sl are defaults used by message builders if no
    # overrides are supplied in the log dict.
    self.xsrcid = xsrcid  # Initial source ID.
    self.xsubid = xsubid  # Initial sub ID.
    self.xlogel = xlogel  # Default error-level (info).
    self.xlogsl = xlogsl  # Default sub-level (base).
    # Optional simple logger and screenwriter objects.
    self.sl = sl  # Optional simple logger to mimic what's being
    # sent to xlog.
    self.sw = sw  # Optional screen writer object for
    # error messages (otherwise print).
    # Message queue.
    self.q = None  # Queue of messages to send.
    # Processing thread.
    self.t = None  # Thread to send messages.
    # Stop signal.
    self.stop = False  # External signal to stop XLog instance.
    self.stopped = False  # When stopped by self.stop.
    # Count messages.
    self.nmsgs = 0  # Number of messages sent.
    #
    if self.xloghost and self.xlogport:
        try:
            if not self.nop:
                self.xlog = XLogTxRx((self.xloghost, self.xlogport), txrate=txrate)
                self.q = queue.Queue(10000)  # 10^4
                self.t = threading.Thread(target=self._xlogthread)
                # ??? daemon?
                self.t.start()
                # FAIL_CONNECT is a test hook: forces a ZeroDivisionError here.
                if FAIL_CONNECT:
                    1 / 0
        except Exception as E:
            _m.beeps(10)
            errmsg = "{}: {}: {} @ {}".format(me, "setup failed", E, _m.tblineno())
            self._p("** " + errmsg)
            raise Exception(errmsg)
def count(self, inode=None):
    """Row count for one inode (when given) or for the whole logfiles table.

    Returns None if the count cannot be fetched; wraps db errors in
    RuntimeError; always commits.
    """
    try:
        cur = self.db.cursor()
        if inode:
            cur.execute('select count(*) from logfiles where inode=?', (inode, ))
        else:
            cur.execute('select count(*) from logfiles')
        try:
            return cur.fetchone()[0]
        except:
            return None
    except Exception as E:
        raise RuntimeError('FFWDB.count: %s @ %s' % (E, tblineno()))
    finally:
        self.db.commit()
def acquired(self, inodes, ts):
    """Bulk-set the 'acquired' timestamp for the given inodes.

    A bulk update bcs per-row updates are slow. No-op when inodes or ts is
    falsy. FIX: the IN-list was built by string interpolation
    ('where inode in %s' % str(inodes)), which produces invalid SQL for a
    1-tuple ('(5,)') and bypasses parameter binding; now uses '?'
    placeholders for every inode.
    """
    if not (inodes and ts):
        return
    try:
        csr = self.db.cursor()
        marks = ', '.join('?' for _ in inodes)
        sql = 'update logfiles set acquired=? where inode in (%s)' % marks
        csr.execute(sql, [ts] + list(inodes))
    except Exception as E:
        errmsg = 'FFWDB.acquired: %s @ %s' % (E, tblineno())
        raise RuntimeError(errmsg)
    finally:
        self.db.commit()
def select(self, inode):
    """Fetch the logfiles row for *inode* as a plain dict, or None if absent.

    Wraps any db error in RuntimeError; always commits.
    """
    try:
        self.db.row_factory = sqlite3.Row
        cur = self.db.cursor()
        cur.execute('select * from logfiles where inode=?', (inode, ))
        row = cur.fetchone()
        if row is None:
            return None
        record = {}
        record.update(row)
        return record
    except Exception as E:
        raise RuntimeError('FFWDB.select: %s @ %s' % (E, tblineno()))
    finally:
        self.db.commit()
def all(self):
    """Return every logfiles row with inode>0 as a list of plain dicts.

    Wraps any db error in RuntimeError; always commits.
    """
    fis = []
    try:
        self.db.row_factory = sqlite3.Row
        cur = self.db.cursor()
        cur.execute('select * from logfiles where inode>0')
        for row in cur:
            record = {}
            record.update(row)
            fis.append(record)
        return fis
    except Exception as E:
        raise RuntimeError('FFWDB.all: %s @ %s' % (E, tblineno()))
    finally:
        self.db.commit()
def getFIs(ts):
    """Return a list of FileInfo dicts of current files."""
    me = 'getFIS'
    fis = []
    try:
        matching = sorted(fn for fn in os.listdir(WPATH) if REFNPATTERN.match(fn))
        for filename in matching:
            fi = getFI(filename, ts)
            if fi:
                fis.append(fi)
    except Exception as E:
        fis = None  # ??? Zap all?
        errmsg = '%s: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        # Return-in-finally: on error this yields None after squawking.
        return fis
def insert(self, fi):
    """Insert FileInfo dict *fi* (keyed by 'inode') into logfiles and return
    the stored row via self.select().

    Raises RuntimeError on any failure, including a duplicate inode.
    FIX: the 'return' lived inside 'finally', which silently swallowed the
    RuntimeError raised in the except clause (and could NameError on an
    unbound 'inode'); every other FFWDB method propagates its RuntimeError,
    so now insert does too.
    """
    try:
        inode = fi['inode']
        if self.count(inode):
            raise ValueError('FFWDB.insert: %d already in db' % (inode))
        # Build the column/placeholder lists from fi's keys.
        ks, qs, vs = [], [], []
        for k, v in fi.items():
            ks.append(k)
            qs.append('?')
            vs.append(v)
        sql = 'insert into logfiles (%s) values (%s)' % (', '.join(ks), ', '.join(qs))
        csr = self.db.cursor()
        csr.execute(sql, vs)
    except Exception as E:
        errmsg = 'FFWDB.insert: %s @ %s' % (E, tblineno())
        raise RuntimeError(errmsg)
    finally:
        self.db.commit()
    # Reached only on success.
    return self.select(inode)
def loadrecs2db():
    """Load a batch into db."""
    # Inserts each staged LOADRECS row into [xlog], skipping rows whose sha1
    # already exists (counted in NDUPE); new inserts are counted in NNEW.
    # NOTE(review): the finally clause commits and clears LOADRECS even when
    # an exception is re-raised — failed batches are dropped; confirm intended.
    global LOADRECS, NOLOAD, NDUPE, NNEW
    try:
        z = str(len(LOADRECS))
    except:
        z = 'None'
    me = 'loadrecs2db(%s)' % (z)
    try:
        if (not LOADRECS) or NOLOAD:
            return
        assert XLOGDB, 'no XLOGDB'
        for lr in LOADRECS:
            sha1 = lr[6]  # !MAGIC! tuple index: sha1 column.
            if len(sha1) != 40:
                lr = lr  # (debug anchor)
                raise ValueError('funny SHA1: ' + repr(sha1))
            # Already?
            try:
                c = XLOGDB.cursor()
                c.execute('select count(*) from xlog where sha1=%s', (sha1,))
                if c.fetchone()[0]:
                    NDUPE += 1
                    continue
            finally:
                c.close()
            # Insert into [xlog].
            try:
                c = XLOGDB.cursor()
                sql = inssqlxlog = 'insert into xlog (%s) values (%s)' % (DB_FNL_XLOG, DB_FIL_XLOG)
                c.execute(sql, lr)
                NNEW += 1
            finally:
                c.close()
        pass
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        XLOGDB.commit()
        LOADRECS = []
def oldest(self, unfinished=True):
    """Return one logfiles row as a plain dict, or None if there is none.

    unfinished=True: the oldest (by modified) row with processed < size.
    unfinished=False: the row with the greatest modified timestamp.
    NOTE(review): the False branch orders 'desc', i.e. returns the *newest*
    row despite this method's name — confirm intended.

    FIX: fetchone() was called twice in a row; with 'limit 1' the second
    call always returned None, so this method unconditionally returned None.
    """
    try:
        self.db.row_factory = sqlite3.Row
        csr = self.db.cursor()
        if unfinished:
            csr.execute('select * from logfiles where (processed < size) order by modified asc limit 1')
        else:
            csr.execute('select * from logfiles order by modified desc limit 1')
        z = csr.fetchone()
        if not z:
            return None
        fi = {}
        fi.update(z)
        return fi
    except Exception as E:
        errmsg = 'FFWDB.oldest: %s @ %s' % (E, tblineno())
        raise RuntimeError(errmsg)
    finally:
        self.db.commit()
def argsString(key, name, default=None, stop=False):
    """Fetch ARGSDICT[key] as a string, expanding the '~me~' placeholder.

    Missing key: return *default* (or raise when *stop* is True); a None
    value falls back to *default* (but '' does not). FIX: removed an
    unterminated ''' commented-out block dangling after the function,
    which broke the file's syntax.
    """
    me, errmsg = 'argsString', ''
    try:
        r = ARGSDICT[key].replace('~me~', argsMe())  # !LC!
    except:
        if stop:
            errmsg = 'no "{}" string arg'.format(name)
            raise Exception
            #!#raise PgmStop2(me, action, errmsg)
        else:
            SL.warning('args: {}: {} (default)'.format(name, default))
            return default
    try:
        if r is None:
            # But not if ''.
            r = default
        r = str(r)
        SL.info('args: {}: {}'.format(name, r))
        return r
    except Exception as E:
        errmsg = '{}: {} @ {}'.format(me, errmsg if errmsg else E, mi.tblineno())
        SL.error(errmsg)
        raise
def update(self, fi):
    """Update the logfiles row keyed by fi['inode'] with fi's other fields
    and return the stored row via self.select().

    Raises RuntimeError on any failure, including a missing inode.
    FIX: the 'return' lived inside 'finally', which silently swallowed the
    RuntimeError raised in the except clause; every other FFWDB method
    propagates its RuntimeError, so now update does too.
    """
    try:
        inode = fi['inode']
        if not self.count(inode):
            raise ValueError('FFWDB.update: %d not in db' % (inode))
        # Build 'k=?' assignment list from fi, excluding the key column.
        kvs, vs = '', []
        for k, v in fi.items():
            if k == 'inode':
                continue
            if kvs:
                kvs += ', '
            kvs += (k + '=?')
            vs.append(v)
        vs.append(inode)
        sql = 'update logfiles set %s where inode=?' % kvs
        csr = self.db.cursor()
        csr.execute(sql, vs)
    except Exception as E:
        errmsg = 'FFWDB.update: %s @ %s' % (E, tblineno())
        raise RuntimeError(errmsg)
    finally:
        self.db.commit()
    # Reached only on success.
    return self.select(inode)
def flushHeartbeats():
    # Write the staged HEARTBEATS rows to the [heartbeat] table: insert a row
    # for a new (srcid, subid) pair, update an existing row only when the
    # staged txts is newer, otherwise skip. Always commits.
    global HEARTBEATS
    me = 'flushHeartbeats'
    try:
        if not HEARTBEATS:
            return
        assert XLOGDB, 'no XLOGDB'
        for k, v in HEARTBEATS.items():
            txts, srcid, subid = float(v[1]), v[2], v[3]  # !MAGIC! tuple indices.
            # Existing txts for this (srcid, subid), if any.
            try:
                c = XLOGDB.cursor()
                c.execute('select txts from heartbeat where srcid=%s and subid=%s', (srcid, subid))
                try:
                    xtxts = c.fetchone()[0]
                except:
                    xtxts = None
            finally:
                c.close()
            # Insert new, or update if newer.
            try:
                c = XLOGDB.cursor()
                if xtxts is None:
                    sql = 'insert into heartbeat (%s) values (%s)' % (DB_FNL_HBS, DB_FIL_HBS)
                    c.execute(sql, v)
                elif txts > xtxts:
                    sql = 'update heartbeat set %s where srcid=%s and subid=%s' % (DB_FUL_HBS, '%s', '%s')
                    # NOTE(review): appends mutate the staged list v in place.
                    v.append(srcid)
                    v.append(subid)
                    c.execute(sql, v)
                else:
                    continue
            finally:
                c.close()
        pass
    except Exception as E:
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        XLOGDB.commit()
def updateDB(ufi):
    """Update FFWDB from given file info dict. Returns dbfi."""
    # Inserts a new row (with processed=0) when the filename is unknown,
    # otherwise updates modified/size/acquired when they changed.
    # NOTE(review): the return-in-finally suppresses the re-raise, so callers
    # see dbfi=None on error rather than the exception.
    me = 'updateDB'
    dbfi = None
    try:
        dbfi = FFWDB.select(ufi['filename'])
        # Insert?
        if not dbfi:
            z = copy.copy(ufi)
            z['processed'] = 0  # New file: nothing processed yet.
            dbfi = FFWDB.insert(z)  # Returns inserted.
            z = None
            if not dbfi:
                raise ValueError('db insertion failed')
        # Update:
        else:
            if dbfi['modified'] != ufi['modified'] or \
               dbfi['size'] != ufi['size']:
                z = {}
                z['filename'] = ufi['filename']
                z['modified'] = ufi['modified']
                z['size'] = ufi['size']
                z['acquired'] = ufi['acquired']
                dbfi = FFWDB.update(z)  # Returns updated.
                assert dbfi, 'no dbfi returned from update'
                z = None
            else:
                # Unchanged: keep the selected row as-is.
                pass
            pass
        pass
    except Exception as E:
        dbfi = None
        errmsg = '%s: E: %s @ %s' % (me, E, _m.tblineno())
        DOSQUAWK(errmsg)
        raise
    finally:
        return dbfi
def get_args_docopt(doc=None, version=None, me=None, help=True, docopt=True, clkvs=False, inipfn=None, useini=True):
    # Build the global ARGS dict from the ini [args] section (when USEINI)
    # overlaid with docopt-parsed command-line values; None values from
    # docopt do NOT clobber ini values. Returns ME. The bare '1/1'
    # statements are no-op breakpoint anchors.
    global ARGS, ME, INIPFN, USEINI, INI, VERSION, CWD
    1/1
    try:
        GLOBAL_INITS()
        if me:
            ME = me
        VERSION = version
        USEINI = useini
        if inipfn:
            INIPFN = inipfn
        else:
            INIPFN = ME + '.ini'
        CWD = os.getcwd()
        get_sysargvs(docstr=doc, help=help, docopt=docopt, clkvs=clkvs)
        get_ini()
        # Build ARGS from INI[args] and
        # SYSARGVS (overrides via docopt).
        try:
            if USEINI and INI:
                # Bare ini keys (no '=') become True.
                ARGS = dict([(k, True if v is None else v) for k, v in INI['args'].items()])
            else:
                ARGS = {}
            # None's from docopt clobber ini values.
            ###ARGS.update(dict([(k, v) for k, v in SYSARGVS.items()]))
            for k, v in SYSARGVS.items():
                if v is not None:
                    ARGS[k] = v
            return ME
        except Exception as E:
            raise
    except Exception as E:
        errmsg = 'get_args: %s @ %s' % (E, _m.tblineno())
        1/1
        raise
    finally:
        1/1
def argsFloat(key, name, default=None, stop=False):
    """Fetch ARGSDICT[key] as a float.

    Missing key: return *default* (or raise when *stop* is True); None or
    '' values fall back to *default*. FIX: removed an unterminated '''
    commented-out block dangling after the function, which broke the
    file's syntax.
    """
    me, errmsg = 'argsFloat', ''
    try:
        r = ARGSDICT[key]
    except:
        if stop:
            errmsg = 'no "{}" float arg'.format(name)
            raise Exception
            #!#raise PgmStop2(me, action, errmsg)
        else:
            SL.info('args: {}: {} (default)'.format(name, default))
            return default
    try:
        if (r is None) or (r == ''):
            # Include ''.
            r = default
        if r:
            r = float(r)
        SL.info('args: {}: {}'.format(name, r))
        return r
    except Exception as E:
        errmsg = '{}: {} @ {}'.format(me, errmsg if errmsg else E, mi.tblineno())
        SL.error(errmsg)
        raise