def configure(args):
    """Configure logging and CSV output for the benchmark run.

    Installs colored logging, opens ``args.csv`` for writing (module-level
    globals ``OUT_FILE`` / ``CSV_WRITER``), writes the CSV header, and
    starts a logstats reporter thread whose counters are shared with the
    workers via ``bigchaindb_benchmark.config``.

    Args:
        args: parsed CLI namespace; reads ``args.log_level`` and ``args.csv``.

    Note: OUT_FILE is deliberately left open for the process lifetime so
    workers can keep appending rows; it is never explicitly closed here.
    """
    global CSV_WRITER
    global OUT_FILE

    coloredlogs.install(level=args.log_level, logger=logger)

    import csv
    # newline='' is required by the csv module: without it, platforms with
    # \r\n line endings produce a blank line after every row.
    OUT_FILE = open(args.csv, 'w', newline='')
    CSV_WRITER = csv.DictWriter(
        OUT_FILE,
        # Might be useful to add 'operation' and 'size'
        fieldnames=['txid', 'size', 'ts_send', 'ts_accept', 'ts_commit',
                    'ts_error'])
    CSV_WRITER.writeheader()

    def emit(stats):
        # Called periodically by the logstats thread with current counters.
        # '<name>.speed' keys may be absent on the first tick, hence .get().
        logger.info(
            'Processing transactions, '
            'queue: %s (%s tx/s), accepted: %s (%s tx/s), committed %s (%s tx/s), errored %s (%s tx/s), mempool %s (%s tx/s)',
            stats['queue'], stats.get('queue.speed', 0),
            stats['accept'], stats.get('accept.speed', 0),
            stats['commit'], stats.get('commit.speed', 0),
            stats['error'], stats.get('error.speed', 0),
            stats['mempool'], stats.get('mempool.speed', 0))

    import logstats
    ls = logstats.Logstats(emit_func=emit)
    # Pre-seed counters so the first emit() never hits a missing key.
    ls['accept'] = 0
    ls['commit'] = 0
    ls['error'] = 0
    logstats.thread.start(ls)
    bigchaindb_benchmark.config = {'ls': ls}
def run_load(args):
    """Spawn ``args.multiprocess`` worker processes that run ``_run_load``.

    Each worker gets an equal share of ``args.count`` transactions, or an
    unbounded amount (``None``) when ``args.count`` is not positive.

    NOTE(review): this definition is shadowed by a later ``run_load`` in the
    same file (which additionally autoconfigures bigchaindb) — confirm which
    one is intended to survive.
    """
    logger.info('Starting %s processes', args.multiprocess)

    stats = logstats.Logstats()
    logstats.thread.start(stats)

    tx_left = None
    if args.count > 0:
        # Floor division: exact for arbitrarily large ints, unlike
        # int(a / b) which goes through float and loses precision > 2**53.
        tx_left = args.count // args.multiprocess

    workers = ProcessGroup(concurrency=args.multiprocess,
                           target=_run_load,
                           args=(tx_left, stats.get_child()))
    workers.start()
def run_load(args):
    """Autoconfigure bigchaindb, then fan out ``_run_load`` workers.

    Loads configuration from ``args.config`` (forcing a reload), then starts
    ``args.multiprocess`` processes, each responsible for an equal share of
    ``args.count`` transactions (``None`` share = unbounded).
    """
    bigchaindb.config_utils.autoconfigure(filename=args.config, force=True)

    logger.info('Starting %s processes', args.multiprocess)

    stats = logstats.Logstats()
    logstats.thread.start(stats)

    tx_left = None
    if args.count > 0:
        # Floor division: exact for arbitrarily large ints, unlike
        # int(a / b) which goes through float and loses precision > 2**53.
        tx_left = args.count // args.multiprocess

    workers = ProcessGroup(concurrency=args.multiprocess,
                           target=_run_load,
                           args=(tx_left, stats.get_child()))
    workers.start()
def init(args):
    """Install colored logging and start a logstats reporter thread.

    The stats object is attached to ``args.ls`` so the caller (and workers)
    can increment counters on it.
    """
    coloredlogs.install(level=args.log_level)

    def emit(stats):
        # Fixed: the concatenated format string was missing a space after
        # 'transactions,' and used an undefined ``log`` instead of the
        # module-level ``logger`` every other emit() in this file uses.
        logger.info(
            'Processing transactions, '
            'queue: %s (%s tx/s), sent: %s (%s tx/s), accepted: %s (%s tx/s), committed %s (%s tx/s), errored %s (%s tx/s), mempool %s (%s tx/s)',
            stats['queue'], stats.get('queue.speed', 0),
            stats['sent'], stats.get('sent.speed', 0),
            stats['accept'], stats.get('accept.speed', 0),
            stats['commit'], stats.get('commit.speed', 0),
            stats['error'], stats.get('error.speed', 0),
            stats['mempool'], stats.get('mempool.speed', 0))

    import logstats
    ls = logstats.Logstats(emit_func=emit)
    # Pre-seed counters so the first emit() never hits a missing key.
    ls['accept'] = 0
    ls['commit'] = 0
    ls['error'] = 0
    logstats.thread.start(ls)
    args.ls = ls
# Demo script: start a logstats reporter thread, then feed it random
# counter increments forever at random sub-200ms intervals.
import logging
logging.basicConfig(level=logging.INFO)

import time
import logstats
from random import choice, uniform

stats = logstats.Logstats()
logstats.thread.start(stats)

COUNTERS = ('A', 'B', 'C')
while True:
    stats[choice(COUNTERS)] += 1
    time.sleep(uniform(0, 0.2))