def insert_raw_transaction(raw_transaction, db, rawtransactions_db): """Add a raw transaction to the database.""" # one transaction per block block_index, block_hash, block_time = create_next_block(db, parse_block=False) cursor = db.cursor() tx_index = block_index - config.BURN_START + 1 tx_hash = hashlib.sha256('{}{}'.format( tx_index, raw_transaction).encode('utf-8')).hexdigest() # print(tx_hash) # Remember to add it to the log dump if pytest.config.option.savescenarios: save_rawtransaction(rawtransactions_db, tx_hash, raw_transaction) source, destination, btc_amount, fee, data = blocks._get_tx_info( raw_transaction) transaction = (tx_index, tx_hash, block_index, block_hash, block_time, source, destination, btc_amount, fee, data, True) cursor.execute( '''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction) tx = list( cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index, )))[0] cursor.close() util.CURRENT_BLOCK_INDEX = block_index # TODO: Correct?! blocks.parse_block(db, block_index, block_time) return tx
def insert_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database.

    Mines one fresh (unparsed) block, inserts the transaction into it
    (unless it is BTC-only), mirrors it into the mock UTXO set, then
    parses the block.  Returns ``(tx_hash, tx)`` where ``tx`` is the
    stored row, or ``None`` when the transaction carried no
    Counterparty data.
    """
    cursor = db.cursor()

    # one transaction per block
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    tx_hash = dummy_tx_hash(raw_transaction)
    tx = None
    tx_index = block_index - config.BURN_START + 1
    try:
        source, destination, btc_amount, fee, data, extra = blocks._get_tx_info(
            raw_transaction)
        transaction = (tx_index, tx_hash, block_index, block_hash, block_time,
                       source, destination, btc_amount, fee, data, True)
        cursor.execute(
            '''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
            transaction)
        # Re-read the row so the caller gets the stored representation.
        tx = list(
            cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''',
                           (tx_index, )))[0]
    except exceptions.BTCOnlyError:
        # BTC-only transactions are deliberately not stored; tx stays None.
        pass
    cursor.close()

    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=1)

    util.CURRENT_BLOCK_INDEX = block_index
    blocks.parse_block(db, block_index, block_time)
    return tx_hash, tx
def insert_block(db, block_index, parse_block=False):
    """Add blocks to the blockchain."""
    # Dummy, deterministic block attributes derived from the index.
    block_hash = hashlib.sha512(chr(block_index).encode('utf-8')).hexdigest()
    block_time = block_index * 1000
    row = (block_index, block_hash, block_time, None, None, None, None)
    cur = db.cursor()
    cur.execute('''INSERT INTO blocks (block_index, block_hash, block_time, ledger_hash, txlist_hash, previous_block_hash, difficulty) VALUES (?,?,?,?,?,?,?)''', row)
    util.CURRENT_BLOCK_INDEX = block_index  # TODO: Correct?!
    cur.close()
    if parse_block:
        blocks.parse_block(db, block_index, block_time)
    return block_index, block_hash, block_time
def insert_block(db, block_index, parse_block=True):
    """Add blocks to the blockchain."""
    # Dummy, deterministic block attributes derived from the index.
    block_time = block_index * 1000
    block_hash = util.dhash_string(chr(block_index))
    row = (block_index, block_hash, block_time, None, None, None, None)
    cur = db.cursor()
    cur.execute('''INSERT INTO blocks (block_index, block_hash, block_time, ledger_hash, txlist_hash, previous_block_hash, difficulty) VALUES (?,?,?,?,?,?,?)''', row)
    util.CURRENT_BLOCK_INDEX = block_index  # TODO: Correct?!
    cur.close()
    if parse_block:
        blocks.parse_block(db, block_index, block_time)
    return block_index, block_hash, block_time
def insert_block(db, block_index, parse_block=False):
    """Add blocks to the blockchain.

    Inserts a dummy block whose hash and timestamp are derived
    deterministically from ``block_index``; optionally parses it.
    Returns ``(block_index, block_hash, block_time)``.
    """
    cursor = db.cursor()
    # Deterministic dummy values keep test runs reproducible.
    block_hash = hashlib.sha512(chr(block_index).encode('utf-8')).hexdigest()
    block_time = block_index * 1000
    block = (block_index, block_hash, block_time, None, None, None, None)
    cursor.execute(
        '''INSERT INTO blocks (block_index, block_hash, block_time, ledger_hash, txlist_hash, previous_block_hash, difficulty) VALUES (?,?,?,?,?,?,?)''',
        block)
    util.CURRENT_BLOCK_INDEX = block_index  # TODO: Correct?!
    cursor.close()
    if parse_block:
        blocks.parse_block(db, block_index, block_time)
    return block_index, block_hash, block_time
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        '44cf374045f44caf86c7b7de61de3e712f4ba3c39523ab95bc68149ef8aede18',
        'b4d68165bdbd4e14cf5f426bd32d9ebf831688f97b4d2a99340afc7db5817ba0',
        'fafa399384e61785222b285cf1cde836cd8b64edbb8c1087bf4a28c9ace84f95',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        '8fd6e608405de90b1da9f1f2b17578eb2ab736ec54716558ad4a8b63987d5b91',
        'a825bc3f82fd92c142c818ed8ebc1e660eb39f9c0e7f6e188a35c06b2fb7dcb2',
        '66cd5bdbf6c9c343258ed2cdf9c7fb6e4360bf74169e30aa6835e919a70a54ef',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        'baa1c5b432094ebc0d0d817db8e874e112d2cc539632a6754bce699e5fb1643f',
        '072451f3cc45d91613e1cb9867125585d1a5460a5e5afeed2cee79ec703ff0f3',
        '14a704d47bc2cedda88e779e4a058d382ecb1c60cb499eb61f1d4c6a2017f084',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def insert_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database."""
    # Each raw transaction gets a freshly-minted, not-yet-parsed block.
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    tx_hash = dummy_tx_hash(raw_transaction)
    tx_index = block_index - config.BURN_START + 1
    source, destination, btc_amount, fee, data = blocks._get_tx_info(raw_transaction)
    row = (tx_index, tx_hash, block_index, block_hash, block_time,
           source, destination, btc_amount, fee, data, True)

    cur = db.cursor()
    cur.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', row)
    # Read the row back so the caller gets the stored representation.
    tx = list(cur.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0]
    cur.close()

    # Mirror the transaction into the mock UTXO set, then parse its block.
    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=1)
    util.CURRENT_BLOCK_INDEX = block_index
    blocks.parse_block(db, block_index, block_time)
    return tx
def insert_raw_transaction(raw_transaction, db, rawtransactions_db): """Add a raw transaction to the database.""" # one transaction per block block_index, block_hash, block_time = create_next_block(db) cursor = db.cursor() tx_index = block_index - config.BURN_START + 1 tx_hash = hashlib.sha256('{}{}'.format(tx_index,raw_transaction).encode('utf-8')).hexdigest() # print(tx_hash) # Remember to add it to the log dump if pytest.config.option.savescenarios: save_rawtransaction(rawtransactions_db, tx_hash, raw_transaction) source, destination, btc_amount, fee, data = blocks.get_tx_info2(raw_transaction) transaction = (tx_index, tx_hash, block_index, block_hash, block_time, source, destination, btc_amount, fee, data, True) cursor.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction) tx = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0] cursor.close() util.CURRENT_BLOCK_INDEX = block_index # TODO: Correct?! blocks.parse_block(db, block_index, block_time) return tx
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        '74f02a316f377684fd1271cfe357c083f337feffc73afebc7a2c124ce95d30fe',
        '34d00501907f38d5fbe362d2cab991cfda7eb1bd3584b85b3e063ab3d045cd93',
        '0faccf23ec33b79f92295d1b8de0a632ea323d0ea098e83b6e4a01fe44254dec',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        '430eac3b9b17e819bf88a50b24ac72a66c799deaf8c6ac0f9c4e4d0a986d2a74',
        'bc61e5fbb427b302a05e06a03929d8b9717f11468666fcda00533cd8aecbba03',
        '4938394e40cd7584bf2f5033c8013b19d3c83af07caddb6595a88c1954dc85d9',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def test_parse_block(server_db):
    """Parse the default block and compare its consensus hashes to the pinned values."""
    expected = (
        'baa1c5b432094ebc0d0d817db8e874e112d2cc539632a6754bce699e5fb1643f',
        '072451f3cc45d91613e1cb9867125585d1a5460a5e5afeed2cee79ec703ff0f3',
        '14a704d47bc2cedda88e779e4a058d382ecb1c60cb499eb61f1d4c6a2017f084',
        None)
    actual = blocks.parse_block(server_db, DP['default_block_index'], 1420914478)
    try:
        assert expected == actual
    except AssertionError:
        raise AssertionError(
            "expected outputs don't match test_outputs:\noutputs=\n%s\ntest_outputs=\n%s"
            % (pprint.pformat(expected), pprint.pformat(actual)))
def reparse(testnet=True):
    """Reparse all transactions from the database.

    Copies the on-disk production database into a fresh in-memory
    database, drops the derived tables, re-parses every block, and
    compares the recomputed consensus hashes to the stored ones.  On a
    ConsensusError the differing block's ledger/txlist is diffed before
    re-raising.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)

    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Echo re-parse progress to the console.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = database.get_connection(read_only=False)
    initialise_db(memory_db)

    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    # Fail fast: apsw.Connection() would otherwise silently create an empty DB.
    assert os.path.exists(prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    # Copy the production database into the in-memory database.
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # Here we don't use block.reparse() because it reparses the db in a transaction (`with db`).
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(list(memory_cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block's hash doesn't match its checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [column['name'] for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(field)
            first_hash = list(memory_cursor.execute(sql, (config.BLOCK_FIRST,)))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field))

    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    # Re-parse every block in order, comparing recomputed hashes to stored ones.
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db, block['block_index'], block['block_time'],
                previous_ledger_hash=previous_ledger_hash, ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash, txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info('Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' % (
                block['block_index'], previous_ledger_hash[-5:], previous_txlist_hash[-5:], previous_messages_hash[-5:],
                (' [overwrote %s]' % previous_found_messages_hash) if previous_found_messages_hash and previous_found_messages_hash != previous_messages_hash else ''))
        except check.ConsensusError as e:
            # Show a diff of the offending block's data before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise e
def reparse(testnet=True):
    """Reparse all transaction from the database, create a new blockchain and compare it to the old one."""
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)
    logger = logging.getLogger()

    if testnet:
        config.PREFIX = b'TESTXXXX'

    memory_db = database.get_connection(read_only=False)
    initialise_db(memory_db)

    # Locate the on-disk production database; fail fast if it is missing
    # (apsw.Connection would otherwise silently create an empty one).
    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(
        data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    assert os.path.exists(
        prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    # Copy the production database into the in-memory database.
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # Here we don't use block.reparse() because it reparses the db in a transaction (`with db`).
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(
            list(
                memory_cursor.execute(
                    '''SELECT * FROM blocks WHERE block_index = ?''',
                    (block_index, ))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block's hash doesn't match its checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [
        column['name']
        for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')
    ]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(
                field)
            first_hash = list(
                memory_cursor.execute(sql, (config.BLOCK_FIRST, )))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute(
                    '''UPDATE blocks SET {} = NULL'''.format(field))

    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    # Re-parse every block in order, comparing recomputed hashes to stored ones.
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db,
                block['block_index'],
                block['block_time'],
                previous_ledger_hash=previous_ledger_hash,
                ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash,
                txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info(
                'Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' %
                (block['block_index'], previous_ledger_hash[-5:],
                 previous_txlist_hash[-5:], previous_messages_hash[-5:],
                 (' [overwrote %s]' % previous_found_messages_hash)
                 if previous_found_messages_hash
                 and previous_found_messages_hash != previous_messages_hash
                 else ''))
        except check.ConsensusError as e:
            # Show a diff of the offending block's data before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise (e)
def reparse(testnet=True):
    """
    Reparse all transaction from the database.
     - Create a new in-memory DB, copy the DB that is on-disk
     - Reparse DB, automatically compares consensus hashes to the original ones from the on-disk DB
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)
    logger = logging.getLogger()

    if testnet:
        config.PREFIX = b'TESTXXXX'

    memory_db = database.get_connection(read_only=False)

    # Locate the on-disk production database; fail fast if it is missing
    # (apsw.Connection would otherwise silently create an empty one).
    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    assert os.path.exists(prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    # Copy DB from file on disk (should be a DB file with at least all the checkpoints)
    #  in-memory DB shouldn't have been written to yet up until this point
    with memory_db.backup("main", prod_db, "main") as backup:
        while not backup.done:
            backup.step(100)

    # Drop most tables (except blocks, transactions, undolog)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(list(memory_cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block's hash doesn't match its checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [column['name'] for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(field)
            first_hash = list(memory_cursor.execute(sql, (config.BLOCK_FIRST,)))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field))

    # Initialise missing tables
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    # Reparse each block; if a ConsensusError is thrown, show the difference.
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db, block['block_index'], block['block_time'],
                previous_ledger_hash=previous_ledger_hash, ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash, txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info('Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' % (
                block['block_index'], previous_ledger_hash[-5:], previous_txlist_hash[-5:], previous_messages_hash[-5:],
                (' [overwrote %s]' % previous_found_messages_hash) if previous_found_messages_hash and previous_found_messages_hash != previous_messages_hash else ''))
        except check.ConsensusError as e:
            # Show a diff of the offending block's data before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise e
def reparse(testnet=True):
    """
    Reparse all transaction from the database.
     - Create a new in-memory DB, copy the DB that is on-disk
     - Reparse DB, automatically compares consensus hashes to the original ones from the on-disk DB
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)
    logger = logging.getLogger()

    if testnet:
        config.PREFIX = b'TESTXXXX'

    memory_db = database.get_connection(read_only=False)

    # Locate the on-disk production database; fail fast if it is missing
    # (apsw.Connection would otherwise silently create an empty one).
    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(
        data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    assert os.path.exists(
        prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    # Copy DB from file on disk (should be a DB file with at least all the checkpoints)
    #  in-memory DB shouldn't have been written to yet up until this point
    with memory_db.backup("main", prod_db, "main") as backup:
        while not backup.done:
            backup.step(100)

    # Drop most tables (except blocks, transactions, undolog)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(
            list(
                memory_cursor.execute(
                    '''SELECT * FROM blocks WHERE block_index = ?''',
                    (block_index, ))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block's hash doesn't match its checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [
        column['name']
        for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')
    ]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(
                field)
            first_hash = list(
                memory_cursor.execute(sql, (config.BLOCK_FIRST, )))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute(
                    '''UPDATE blocks SET {} = NULL'''.format(field))

    # Initialise missing tables
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    # Reparse each block; if a ConsensusError is thrown, show the difference.
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db,
                block['block_index'],
                block['block_time'],
                previous_ledger_hash=previous_ledger_hash,
                ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash,
                txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info(
                'Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' %
                (block['block_index'], previous_ledger_hash[-5:],
                 previous_txlist_hash[-5:], previous_messages_hash[-5:],
                 (' [overwrote %s]' % previous_found_messages_hash)
                 if previous_found_messages_hash
                 and previous_found_messages_hash != previous_messages_hash
                 else ''))
        except check.ConsensusError as e:
            # Show a diff of the offending block's data before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise e