def chain_measure_bps_and_tps(archival_node, start_time, end_time,
                              duration=None):
    latest_block_hash = archival_node.get_status()['sync_info']['latest_block_hash']
    curr_block = archival_node.get_block(latest_block_hash)['result']
    curr_time = get_timestamp(curr_block)

    if end_time is None:
        end_time = curr_time
    if start_time is None:
        start_time = end_time - duration

    # One entry per block, equal to the timestamp of that block.
    block_times = []
    # One entry per block (because there is only one shard), equal to the
    # number of transactions in that block's chunk.
    tx_count = []

    # Walk the chain backwards from the latest block until start_time,
    # collecting timestamps and transaction counts for blocks inside the
    # measured window.
    while curr_time > start_time:
        if curr_time < end_time:
            block_times.append(curr_time)
            chunk_hash = curr_block['chunks'][0]['chunk_hash']
            chunk = archival_node.get_chunk(chunk_hash)['result']
            tx_count.append(len(chunk['transactions']))
        prev_hash = curr_block['header']['prev_hash']
        curr_block = archival_node.get_block(prev_hash)['result']
        curr_time = get_timestamp(curr_block)

    # The chain was traversed newest-to-oldest; restore chronological order.
    block_times.reverse()
    tx_count.reverse()

    tx_cumulative = data.compute_cumulative(tx_count)
    bps = data.compute_rate(block_times)
    tps_fit = data.linear_regression(block_times, tx_cumulative)
    return {'bps': bps, 'tps': tps_fit['slope']}
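

# The chain_measure_bps_and_tps variants in this section call a get_timestamp()
# helper that is defined elsewhere in the module. A minimal sketch is shown
# below; it assumes the block header returned by the RPC carries its timestamp
# as a nanosecond integer under the 'timestamp' key, and converts it to seconds
# so it can be compared against start_time/end_time and fed to the rate
# computations.
def get_timestamp(block):
    # Block header timestamps are nanosecond integers; convert to seconds.
    return block['header']['timestamp'] / 1e9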


def chain_measure_bps_and_tps(archival_node,
                              start_time,
                              end_time,
                              duration=None):
    latest_block_hash = archival_node.get_latest_block().hash
    curr_block = archival_node.get_block(latest_block_hash)['result']
    curr_time = get_timestamp(curr_block)

    if end_time is None:
        end_time = curr_time
    if start_time is None:
        start_time = end_time - duration
    logger.info(
        f'Measuring BPS and TPS in the time range {start_time} to {end_time}')

    # One entry per block, equal to the timestamp of that block.
    block_times = []
    # One entry per block, containing the count of transactions in all chunks
    # of the block.
    tx_count = []
    block_counter = 0

    # Walk the chain backwards from the latest block, collecting per-block
    # timestamps and transaction counts for blocks inside the measured window.
    while curr_time > start_time:
        if curr_time < end_time:
            block_times.append(curr_time)
            # Gas used per chunk, converted to Tgas.
            gas_per_chunk = []
            for chunk in curr_block['chunks']:
                gas_per_chunk.append(chunk['gas_used'] * 1e-12)
            gas_block = sum(gas_per_chunk)
            # Fetch the chunks of this block in parallel and count their
            # transactions.
            tx_per_chunk = [None] * len(curr_block['chunks'])
            pmap(
                lambda i: get_chunk_txn(i, curr_block['chunks'], archival_node,
                                        tx_per_chunk),
                range(len(curr_block['chunks'])))
            txs = sum(tx_per_chunk)
            tx_count.append(txs)
            logger.info(
                f'Processed block at time {curr_time} height #{curr_block["header"]["height"]}, '
                f'# txs in a block: {txs}, per chunk: {tx_per_chunk}, '
                f'gas in block: {gas_block}, gas per chunk: {gas_per_chunk}')
        prev_hash = curr_block['header']['prev_hash']
        curr_block = archival_node.get_block(prev_hash)['result']
        curr_time = get_timestamp(curr_block)

    # The chain was traversed newest-to-oldest; restore chronological order.
    block_times.reverse()
    tx_count.reverse()
    assert block_times

    tx_cumulative = data.compute_cumulative(tx_count)
    bps = data.compute_rate(block_times)
    tps_fit = data.linear_regression(block_times, tx_cumulative)
    logger.info(
        f'Num blocks: {len(block_times)}, num transactions: {len(tx_count)}, '
        f'bps: {bps}, tps_fit: {tps_fit}')
    return {'bps': bps, 'tps': tps_fit['slope']}
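

# The pmap() call above assumes a parallel-map helper and a get_chunk_txn()
# function defined elsewhere in the module. The sketch below is inferred from
# the call site only: get_chunk_txn(i, chunks, node, out) is expected to fetch
# chunk i from the archival node and store its transaction count in out[i].
def get_chunk_txn(index, chunks, archival_node, result):
    # Fetch one chunk by hash and record how many transactions it contains.
    chunk_hash = chunks[index]['chunk_hash']
    chunk = archival_node.get_chunk(chunk_hash)['result']
    result[index] = len(chunk['transactions'])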


def measure_tps_bps(nodes, tx_filename):
    input_tx_events = mocknet.get_tx_events(nodes, tx_filename)
    # Drop the first and last 5% of events to avoid the edges of the test.
    n = int(0.05 * len(input_tx_events))
    if n > 0:
        # Guard against n == 0, where the slice [0:-0] would be empty.
        input_tx_events = input_tx_events[n:-n]
    input_tps = data.compute_rate(input_tx_events)
    measurement = mocknet.chain_measure_bps_and_tps(nodes[-1],
                                                    input_tx_events[0],
                                                    input_tx_events[-1])
    result = {
        'bps': measurement['bps'],
        'in_tps': input_tps,
        'out_tps': measurement['tps']
    }
    logger.info(f'{result}')
    return result
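

# The data module used throughout this section (compute_cumulative,
# compute_rate, linear_regression) is not shown here. The sketch below is an
# assumption, not the module's actual implementation; it captures only the
# behaviour these functions rely on: running totals, average events per second
# over the covered time span, and a least-squares fit returning at least a
# 'slope' key (read as transactions per second when cumulative transaction
# counts are regressed against block timestamps).
import itertools


def compute_cumulative(counts):
    # Running totals: [1, 2, 3] -> [1, 3, 6].
    return list(itertools.accumulate(counts))


def compute_rate(timestamps):
    # Average number of events per second between the first and last event.
    if len(timestamps) < 2:
        return 0.0
    return (len(timestamps) - 1) / (timestamps[-1] - timestamps[0])


def linear_regression(xs, ys):
    # Ordinary least-squares fit of y ~ slope * x + intercept.
    n = len(xs)
    mean_x = sum(xs) / n
    mean_y = sum(ys) / n
    cov = sum((x - mean_x) * (y - mean_y) for x, y in zip(xs, ys))
    var = sum((x - mean_x)**2 for x in xs)
    slope = cov / var
    return {'slope': slope, 'intercept': mean_y - slope * mean_x}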