def findPending(self, to):
    """Return the 'send' receipts addressed to *to* for the current epoch
    that have no matching 'receive' receipt (i.e. coins still pending).

    Scans <filename>/receipts/<epoch>/ in two passes: first collecting
    'send' receipts keyed by coin id, then dropping every coin that a
    'receive' receipt addressed to *to* has already acknowledged.
    """
    pending = {}
    ep = str(epoch())
    # Guard against a missing wallet directory / receipts tree.
    if not os.path.exists(self.filename):
        print('No directory')
        return []
    if not os.path.exists(self.filename + '/receipts'):
        print('No receipts')
        return []
    # Hoist the per-epoch directory path instead of rebuilding it each time.
    epochdir = self.filename + '/receipts/' + ep
    if not os.path.exists(epochdir):
        print('No receipts for this epoch: ' + ep)
        return []
    # Pass 1: collect sends addressed to us, keyed by coin id.
    for receiptfile in os.listdir(epochdir):
        receipt = Receipt()
        receipt.load(epochdir + '/' + receiptfile)
        print(receipt.id)
        if receipt.cmd == 'send' and receipt.args == to:
            pending[receipt.coin.id] = receipt
    print(pending)
    # Pass 2: drop any coin we have already acknowledged with a receive.
    for receiptfile in os.listdir(epochdir):
        receipt = Receipt()
        receipt.load(epochdir + '/' + receiptfile)
        print(receipt.id)
        if receipt.cmd == 'receive' and receipt.args == to:
            if receipt.coin.id in pending:
                del pending[receipt.coin.id]
    print(pending)
    # list() keeps the return type stable across Python 2/3 (.values() is a
    # lazy view on Python 3) and safe for callers that index or re-iterate.
    return list(pending.values())
def receive():
    """Collect all coins pending for the GitBank key.

    Verifies each pending 'send' receipt, banks its coin, and records a
    signed 'receive' receipt acknowledging it. Processing stops at the
    first receipt that fails validation.
    """
    (pub, priv) = loadKeys("GitBank")
    cs = Coins("GitBank")
    receipts = Receipts("GitBank")
    pending = receipts.findPending(pub)
    count = len(pending)
    if count == 0:
        print("No pending coins")
    elif count == 1:
        print("1 pending coin")
    else:
        print(str(count) + " pending coins")
    for incoming in pending:
        print(incoming)
        # Guard clauses: abort on anything that is not a verified 'send'
        # addressed to our public key.
        if incoming.cmd != "send":
            print("Unknown command: " + str(incoming.cmd))
            return
        if incoming.args != pub:
            print("Not me: " + str(incoming.args) + " " + str(pub))
            return
        if not incoming.verify():
            print("Not verified")
            return
        cs.add(incoming.coin)
        # Record a signed acknowledgement for the sender.
        ack = Receive(None, pub, epoch(), incoming.coin, incoming.pub)
        ack.setPrivate(priv)
        ack.sign()
        receipts.add(ack)
def mprf_history(args):
    """history - report contents of the mpra table

    usage: mpra history [-s/--since] <date/time>
    """
    parser = optparse.OptionParser()
    parser.add_option('-d', '--debug',
                      action='store_true', default=False, dest='debug',
                      help='run the debugger')
    parser.add_option('-s', '--since',
                      action='store', default='', dest='since',
                      help='only report records since ...')
    (o, a) = parser.parse_args(args)

    if o.debug:
        pdb.set_trace()

    # An empty --since means "from the beginning of time" (epoch 0).
    if o.since:
        n_since = util.epoch(o.since)
    else:
        n_since = 0
    print(rpt_lib.get_mpra_report(last_rpt_time=n_since))
def send(args):
    """Send one coin from the GitBank wallet to a named participant.

    args[0] names the recipient; their public key is looked up in
    participants.json. Creates, signs, and records a 'send' receipt.
    """
    to = args[0]
    # Context manager guarantees the file is closed even if json parsing
    # raises (the original leaked the handle in that case).
    with open("participants.json", "rb") as f:
        participants = json.loads(f.read())
    if to not in participants:
        print("Unknown participant: " + str(to))
        return
    toPub = participants[to]
    to = loadPublic(toPub, format="PEM")
    (pub, priv) = loadKeys("GitBank")
    cs = Coins("GitBank")
    coin = cs.get()
    if not coin:
        print("No coins!")
        return
    receipt = Send(None, pub, epoch(), coin, to)
    receipt.setPrivate(priv)
    receipt.sign()
    receipts = Receipts("GitBank")
    receipts.add(receipt)
def create(self, proof=None):
    """Mint a new coin in this wallet and record a signed 'create' receipt.

    *proof*, if given, is stored in the receipt as evidence for the mint.
    """
    wallet_coins = Coins(self.dir)
    coin = wallet_coins.new(self.pub, self.priv)
    book = Receipts(self.dir)
    receipt = Create(None, self.pub, epoch(), coin, proof)
    receipt.setPrivate(self.priv)
    receipt.sign()
    book.add(receipt)
def load_history(filename):
    """
    Read log file *filename* and create records in table pfx_history
    corresponding to each time the cv plugin was run. Line containing
    'cv_plugin' and 'firing up' indicates run time. Subsequent line
    containing 'cv_plugin' and 'failures: %d' indicate errors for the run.
    """
    # -------------------------------------------------------------------------
    def cv_fires(line):
        """
        Parse *line* to decide whether it indicates a firing of the cv
        plugin.
        """
        return all([runtime is None,
                    error is None,
                    'cv_plugin' in line or 'checksum-verifier' in line,
                    'firing up' in line])

    # -------------------------------------------------------------------------
    def cv_completes(line):
        """
        Parse *line* to decide whether it indicates completion of a firing
        of the cv plugin.
        """
        return all([runtime is not None,
                    error is None,
                    'cv_plugin' in line or 'checksum-verifier' in line,
                    'failures:' in line,
                    'totals' not in line])

    # -------------------------------------------------------------------------
    db = CrawlDBI.DBI(dbtype='crawler')
    # runtime/error act as a tiny state machine: both None -> waiting for a
    # firing; runtime set -> waiting for its completion; both set -> insert.
    runtime = error = None
    with open(filename, 'r') as f:
        for line in f:
            if cv_fires(line):
                # NOTE(review): assumes the first 18 chars hold the
                # timestamp -- confirm against the log format.
                runtime = U.epoch(line[0:18])
            if cv_completes(line):
                # Raw string: '\d' is an invalid escape in a plain literal
                # (DeprecationWarning today, SyntaxError in future Pythons).
                # The pattern bytes are unchanged.
                error = int(U.rgxin(r'failures: (\d+)', line))
            if runtime is not None and error is not None:
                db.insert(table='history',
                          ignore=True,
                          fields=['plugin', 'runtime', 'errors'],
                          data=[('cv', runtime, error)])
                runtime = error = None
    db.close()
def mprf_epoch(args):
    """epoch - convert a YYYY.mmdd HH:MM:SS to an epoch time

    usage: mpra epoch 2014.0201.10.27.53
    """
    parser = optparse.OptionParser()
    parser.add_option('-d', '--debug',
                      action='store_true', default=False, dest='debug',
                      help='run the debugger')
    (o, a) = parser.parse_args(args)

    if o.debug:
        pdb.set_trace()

    # Each positional argument is an independent date/time to convert.
    for ymd in a:
        print(int(util.epoch(ymd)))
def send(dir, coin, to):
    """Coroutine: send *coin* to the peer whose public key is *to*, then
    wait for and record the peer's signed 'receive' acknowledgement.

    NOTE(review): pub, priv, cs and eventloop are not defined in this
    function -- presumably module globals; confirm before refactoring.
    """
    try:
        receipt = Send(None, pub, epoch(), coin, loadPublic(to))
        receipt.setPrivate(priv)
        receipt.sign()
        receipts = Receipts()
        receipts.load(dir + '/receipts.dat')
        receipts.add(receipt)
        smsg = json.dumps(receipt.save(True))
        print('sending')
        client = Client()
        yield client.connect('localhost', 7050)
        yield client.write(smsg + "\n")
        s = yield client.read_until("\n")
        msg = json.loads(s)
        receipt = Receive()
        receipt.load(msg)
        # Validate the acknowledgement before persisting anything.
        if receipt.cmd != 'receive':
            print('Unknown command: ' + str(receipt.cmd))
            return
        if receipt.args.save_pkcs1('DER') != pub.save_pkcs1('DER'):
            print('Not me')
            return
        if not rsa.verify(str(receipt.sig), receipt.pub):
            print('Not verified')
            return
        cs.save(dir + '/coins.dat')
        receipts.add(receipt)
        print('saving ' + str(len(receipts.receipts)))
        receipts.save(dir + '/receipts.dat')
        eventloop.halt()
    except Exception as e:
        # Fix: 'except Exception, e' is Python-2-only syntax; 'as' works on
        # Python 2.6+ and 3.x alike.
        print('Exception:')
        print(e)
        traceback.print_exc()
def handle_recv(conn):
    # Server-side coroutine: accept one 'send' receipt over *conn*, verify
    # it, bank the coin, and reply with a signed 'receive' receipt.
    #
    # NOTE(review): dir, pub, priv and cs are not defined in this function
    # -- presumably module globals set up by the caller; confirm before
    # refactoring.
    print('connected')
    s=yield conn.read_until("\n")
    print('read')
    print(s)
    smsg=json.loads(s)
    receipts=Receipts()
    receipts.load(dir+'/receipts.dat')
    receipt=Send()
    receipt.load(smsg)
    # Reject anything that is not a verified 'send' addressed to our key.
    if receipt.cmd!='send':
        print('Unknown command: '+str(receipt.cmd))
        return
    if receipt.args!=pub:
        print('Not me: '+str(receipt.args)+' '+str(pub))
        return
    if not rsa.verify(str(receipt.sig), receipt.pub):
        print('Not verified')
        return
    # Bank the coin and persist the incoming receipt first ...
    cs.add(receipt.coin)
    cs.save(dir+'/coins.dat')
    receipts.add(receipt)
    receipts.save(dir+'/receipts.dat')
    # ... then build, sign, and persist our acknowledgement ('receipt' is
    # deliberately rebound to the outgoing Receive from here on).
    receipt=Receive(None, pub, epoch(), receipt.coin, receipt.pub)
    receipt.setPrivate(priv)
    receipt.sign()
    receipts.add(receipt)
    receipts.save(dir+'/receipts.dat')
    smsg=json.dumps(receipt.save(True))
    print('sending')
    print(smsg)
    yield conn.write(smsg+"\n")
    print('sent')
def mprf_migr_recs(args):
    """migr_recs - list the records in table BFMIGRREC

    usage: mpra migr_recs [-l/limit N]
                          [-b/--before DATE-TIME]
                          [-a/--after DATE-TIME]

    with -l N, only report the first N records
    with -b DATE-TIME, only report the records with create times before
       DATE-TIME.
    with -a DATE-TIME, only report the records with create times after
       DATE-TIME.
    """
    p = optparse.OptionParser()
    p.add_option('-c', '--count',
                 action='store_true', default=False, dest='count',
                 help='report record counts rather than records')
    p.add_option('-d', '--debug',
                 action='store_true', default=False, dest='debug',
                 help='run the debugger')
    p.add_option('-l', '--limit',
                 action='store', default='', dest='limit',
                 help='how many records to fetch')
    p.add_option('-b', '--before',
                 action='store', default='', dest='before',
                 help='fetch records from before the date/time')
    p.add_option('-a', '--after',
                 action='store', default='', dest='after',
                 help='fetch records from after the date/time')
    (o, a) = p.parse_args(args)

    if o.debug:
        pdb.set_trace()

    cfg = CrawlConfig.get_config()
    dbargs = {'table': 'bfmigrrec'}

    # The original spelled this logic as an 8-way elif over every
    # limit/before/after combination, and the (limit, before, after) branch
    # misspelled dbargs as 'dbarsg', raising NameError. The limit and
    # where/data choices are independent, so compute them orthogonally.
    if o.limit != '':
        dbargs['limit'] = int(o.limit)
    elif o.before == '' and o.after == '':
        # No constraints at all: cap the output at 30 records.
        dbargs['limit'] = 30

    if o.before != '' and o.after != '':
        dbargs['where'] = '? < record_create_time and record_create_time < ?'
        dbargs['data'] = (util.epoch(o.after), util.epoch(o.before))
    elif o.after != '':
        dbargs['where'] = '? < record_create_time'
        dbargs['data'] = (util.epoch(o.after),)
    elif o.before != '':
        dbargs['where'] = 'record_create_time < ?'
        dbargs['data'] = (util.epoch(o.before),)

    if o.count:
        dbargs['fields'] = ['count(*)']
    else:
        dbargs['fields'] = ['bfid',
                            'record_create_time',
                            'migration_failure_count']
        dbargs['orderby'] = 'record_create_time'

    rows = mpra_lib.lookup_migr_recs(**dbargs)
    for row in rows:
        if o.count:
            print("Records found: %d" % row['1'])
        else:
            print("%s %s %d" %
                  (CrawlDBI.DBIdb2.hexstr(row['BFID']),
                   util.ymdhms(row['RECORD_CREATE_TIME']),
                   row['MIGRATION_FAILURE_COUNT']))
def mprf_age(args):
    """age - list the records in table BFMIGRREC or BFPURGEREC older than age

    usage: mpra age -t [migr|purge] -a/--age N[S|M|H|d|m|Y] [-c/--count]

    Report migration records (or a count of them) older than the age
    indicated.

    --age N -- report records older than N
    --before D -- report records from before date D
    --start S -- report records with timestamps larger than S
    --end E -- report recs with timestampes smaller than E
    """
    p = optparse.OptionParser()
    p.add_option('-a', '--age',
                 action='store', default='', dest='age',
                 help='report records older than this')
    p.add_option('-b', '--before',
                 action='store', default='', dest='before',
                 help='report records from before this epoch')
    p.add_option('-c', '--count',
                 action='store_true', default=False, dest='count',
                 help='report record counts rather than records')
    p.add_option('-d', '--debug',
                 action='store_true', default=False, dest='debug',
                 help='run the debugger')
    p.add_option('-e', '--end',
                 action='store', default='', dest='end',
                 help='ending epoch time')
    p.add_option('-p', '--path',
                 action='store_true', default=False, dest='path',
                 help='report paths as well as bitfile IDs')
    p.add_option('-s', '--start',
                 action='store', default='', dest='start',
                 help='starting epoch time')
    p.add_option('-t', '--table',
                 action='store', default='', dest='table',
                 help='which table to age')
    (o, a) = p.parse_args(args)

    if o.debug:
        pdb.set_trace()

    cfg = CrawlConfig.get_config()
    start = 0
    # --age, --before and --end are mutually exclusive ways of choosing the
    # upper bound of the report window.
    if o.age and o.before:
        raise StandardError("--age and --before are mutually exclusive")
    elif o.age and '' != o.end:
        raise StandardError("--age and --end are mutually exclusive")
    elif o.before and '' != o.end:
        raise StandardError("--before and --end are mutually exclusive")
    elif o.before:
        end = time.mktime(time.strptime(o.before, "%Y.%m%d"))
    elif o.age:
        end = time.time() - cfg.to_seconds(o.age)
    elif o.end:
        end = util.epoch(o.end)
    else:
        # Fix: 'end' was previously left unbound when none of --age,
        # --before, or --end was given, crashing below with NameError.
        # Fail up front with a clear message instead.
        raise StandardError("one of --age, --before, or --end is required")

    if o.start:
        start = util.epoch(o.start)

    if o.table == '':
        o.table = 'migr'

    print("%d, %d" % (start, end))
    mpra_lib.age(o.table, start, end, o.count, sys.stdout, path=o.path)
def mprf_migr_recs(args):
    """migr_recs - list the records in table BFMIGRREC

    usage: mpra migr_recs [-l/limit N]
                          [-b/--before DATE-TIME]
                          [-a/--after DATE-TIME]

    with -l N, only report the first N records
    with -b DATE-TIME, only report the records with create times before
       DATE-TIME.
    with -a DATE-TIME, only report the records with create times after
       DATE-TIME.

    NOTE(review): this function is defined twice in this file with the same
    name; at import time this later definition wins. Consider removing the
    duplicate.
    """
    p = optparse.OptionParser()
    p.add_option('-c', '--count',
                 action='store_true', default=False, dest='count',
                 help='report record counts rather than records')
    p.add_option('-d', '--debug',
                 action='store_true', default=False, dest='debug',
                 help='run the debugger')
    p.add_option('-l', '--limit',
                 action='store', default='', dest='limit',
                 help='how many records to fetch')
    p.add_option('-b', '--before',
                 action='store', default='', dest='before',
                 help='fetch records from before the date/time')
    p.add_option('-a', '--after',
                 action='store', default='', dest='after',
                 help='fetch records from after the date/time')
    (o, a) = p.parse_args(args)

    if o.debug:
        pdb.set_trace()

    cfg = CrawlConfig.get_config()
    dbargs = {'table': 'bfmigrrec'}

    # Enumerate the limit/before/after combinations; each sets a record
    # cap and/or a record_create_time window.
    if o.limit == '' and o.before == '' and o.after == '':
        dbargs['limit'] = 30
    elif o.limit == '' and o.before == '' and o.after != '':
        dbargs['where'] = '? < record_create_time'
        dbargs['data'] = (util.epoch(o.after), )
    elif o.limit == '' and o.before != '' and o.after == '':
        dbargs['where'] = 'record_create_time < ?'
        dbargs['data'] = (util.epoch(o.before), )
    elif o.limit == '' and o.before != '' and o.after != '':
        dbargs['where'] = '? < record_create_time and record_create_time < ?'
        dbargs['data'] = (util.epoch(o.after), util.epoch(o.before))
    elif o.limit != '' and o.before == '' and o.after == '':
        dbargs['limit'] = int(o.limit)
    elif o.limit != '' and o.before == '' and o.after != '':
        dbargs['limit'] = int(o.limit)
        dbargs['where'] = '? < record_create_time'
        dbargs['data'] = (util.epoch(o.after), )
    elif o.limit != '' and o.before != '' and o.after == '':
        dbargs['limit'] = int(o.limit)
        dbargs['where'] = 'record_create_time < ?'
        dbargs['data'] = (util.epoch(o.before), )
    elif o.limit != '' and o.before != '' and o.after != '':
        dbargs['limit'] = int(o.limit)
        dbargs['where'] = '? < record_create_time and record_create_time < ?'
        # Fix: was misspelled 'dbarsg', raising NameError whenever all
        # three of --limit/--before/--after were supplied.
        dbargs['data'] = (util.epoch(o.after), util.epoch(o.before))

    if o.count:
        dbargs['fields'] = ['count(*)']
    else:
        dbargs['fields'] = [
            'bfid', 'record_create_time', 'migration_failure_count'
        ]
        dbargs['orderby'] = 'record_create_time'

    rows = mpra_lib.lookup_migr_recs(**dbargs)
    for row in rows:
        if o.count:
            print("Records found: %d" % row['1'])
        else:
            print("%s %s %d" % (CrawlDBI.DBIdb2.hexstr(
                row['BFID']), util.ymdhms(row['RECORD_CREATE_TIME']),
                row['MIGRATION_FAILURE_COUNT']))