def initialise_rawtransactions_db(db):
    """Drop the old raw-transactions table, create a new one and populate it
    from the unspent_outputs.json fixture.

    Args:
        db: writable database connection; its ``raw_transactions`` table is
            rebuilt from scratch.
    """
    if pytest.config.option.savescenarios:
        server.initialise(database_file=':memory:', testnet=True, **COUNTERPARTYD_OPTIONS)

    cursor = db.cursor()
    cursor.execute('DROP TABLE IF EXISTS raw_transactions')
    cursor.execute('CREATE TABLE IF NOT EXISTS raw_transactions(tx_hash TEXT UNIQUE, tx_hex TEXT)')

    with open(CURR_DIR + '/fixtures/unspent_outputs.json', 'r') as listunspent_test_file:
        wallet_unspent = json.load(listunspent_test_file)
    for output in wallet_unspent:
        # lx() reverses the displayed txid into internal byte order.
        txid = binascii.hexlify(bitcoinlib.core.lx(output['txid'])).decode()
        # BUG FIX: the result was bound to an unused local `tx`; keep the
        # call only as a sanity check that the fixture hex deserializes.
        backend.deserialize(output['txhex'])
        cursor.execute('INSERT INTO raw_transactions VALUES (?, ?)', (txid, output['txhex']))
    cursor.close()
def setup_function(function):
    """Per-test setup: initialise the server config, remove any stale
    database file, then open a fresh connection in the module-global `db`
    and initialise the block tables."""
    server.initialise(database_file=tempfile.gettempdir() + '/shellparty.unittest.db',
                      rpc_port=9999, rpc_password='******',
                      backend_password='******',
                      testnet=True, testcoin=False)
    # Remove a leftover database from a previous run; absence is fine.
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit — narrowed to the filesystem errors os.remove raises.
    try:
        os.remove(config.DATABASE)
    except OSError:
        pass

    # Connect to database.
    global db
    db = database.get_connection(read_only=False, foreign_keys=False)
    from shellpartylib.lib import blocks
    blocks.initialise(db)
def setup_module():
    """Initialise the database with default data and wait for server to be ready."""
    server.initialise(database_file=tempfile.gettempdir() + '/fixtures.unittest.db',
                      testnet=True, **util_test.COUNTERPARTYD_OPTIONS)
    db = util_test.restore_database(config.DATABASE,
                                    CURR_DIR + '/fixtures/scenarios/unittest_fixture.sql')
    util.FIRST_MULTISIG_BLOCK_TESTNET = 1

    # start RPC server
    api_server = api.APIServer()
    api_server.daemon = True
    api_server.start()

    # Poll readiness once per millisecond, up to 5000 attempts (~5 s).
    for tick in range(5000):
        if api_server.is_ready:
            break
        if tick == 4999:
            raise Exception("Timeout: RPC server not ready after 5s")
        time.sleep(0.001)
def run_scenario(scenario, rawtransactions_db):
    """Execute a scenario for integration test.

    Returns a tuple ``(dump, log, raw_transactions_json)``: a dump of the
    db, the full captured log, and a JSON string of the raw transactions.
    """
    server.initialise(database_file=':memory:', testnet=True, **COUNTERPARTYD_OPTIONS)
    config.PREFIX = b'TESTXXXX'
    util.FIRST_MULTISIG_BLOCK_TESTNET = 1
    # Disable consensus checkpoints for the run; restored in the finally.
    checkpoints = dict(check.CHECKPOINTS_TESTNET)
    check.CHECKPOINTS_TESTNET = {}

    # Capture all root-logger output into an in-memory buffer.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logger_buff = io.StringIO()
    handler = logging.StreamHandler(logger_buff)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    asyncio_log = logging.getLogger('asyncio')
    asyncio_log.setLevel(logging.ERROR)

    db = database.get_connection(read_only=False)
    initialise_db(db)

    raw_transactions = []
    try:
        for tx in scenario:
            if tx[0] != 'create_next_block':
                # Each scenario entry names a message module; compose and
                # construct the corresponding unsigned transaction.
                module = sys.modules['counterpartylib.lib.messages.{}'.format(tx[0])]
                compose = getattr(module, 'compose')
                unsigned_tx_hex = transaction.construct(db, compose(db, *tx[1]), **tx[2])
                raw_transactions.append({tx[0]: unsigned_tx_hex})
                insert_raw_transaction(unsigned_tx_hex, db, rawtransactions_db)
            else:
                create_next_block(db, block_index=config.BURN_START + tx[1], parse_block=True)

        dump = dump_database(db)
        log = logger_buff.getvalue()
    finally:
        # BUG FIX: the capture handler was never detached, so each call
        # leaked a handler on the root logger and later runs wrote into
        # stale buffers. Also close the db and restore the checkpoints
        # even when a scenario step raises.
        logger.removeHandler(handler)
        db.close()
        check.CHECKPOINTS_TESTNET = checkpoints
    return dump, log, json.dumps(raw_transactions, indent=4)
def setup_function(function):
    """Per-test setup: initialise the server config, remove any stale
    database file, then open a fresh connection in the module-global `db`
    and initialise the block tables."""
    server.initialise(database_file=tempfile.gettempdir() + '/counterpartyd.unittest.db',
                      rpc_port=9999, rpc_password='******',
                      backend_password='******',
                      testnet=True, testcoin=False)
    # Remove a leftover database from a previous run; absence is fine.
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit — narrowed to the filesystem errors os.remove raises.
    try:
        os.remove(config.DATABASE)
    except OSError:
        pass

    # Connect to database.
    global db
    db = database.get_connection(read_only=False, foreign_keys=False)
    from counterpartylib.lib import blocks
    blocks.initialise(db)
def setup_module():
    """Initialise the database with default data and wait for server to be ready."""
    server.initialise(database_file=tempfile.gettempdir() + '/fixtures.unittest.db',
                      testnet=True, **util_test.COUNTERPARTYD_OPTIONS)
    db = util_test.restore_database(config.DATABASE,
                                    CURR_DIR + '/fixtures/scenarios/unittest_fixture.sql')
    util.FIRST_MULTISIG_BLOCK_TESTNET = 1

    # start RPC server
    api_server = api.APIServer()
    api_server.daemon = True
    api_server.start()

    # Wait (1 ms per probe, 5000 probes max ≈ 5 s) until the server is up.
    probes = 0
    while not api_server.is_ready:
        probes += 1
        if probes == 5000:
            raise Exception("Timeout: RPC server not ready after 5s")
        time.sleep(0.001)
def run_scenario(scenario, rawtransactions_db):
    """Execute a scenario for integration test.

    Returns a tuple ``(dump, log, raw_transactions_json)``: a dump of the
    db, the full captured log, and a JSON string of the raw transactions.
    """
    server.initialise(database_file=':memory:', testnet=True, **COUNTERPARTYD_OPTIONS)
    config.PREFIX = b'TESTXXXX'
    util.FIRST_MULTISIG_BLOCK_TESTNET = 1
    # Disable consensus checkpoints for the run; restored in the finally.
    checkpoints = dict(check.CHECKPOINTS_TESTNET)
    check.CHECKPOINTS_TESTNET = {}

    # Capture all root-logger output into an in-memory buffer.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logger_buff = io.StringIO()
    handler = logging.StreamHandler(logger_buff)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    asyncio_log = logging.getLogger('asyncio')
    asyncio_log.setLevel(logging.ERROR)

    db = database.get_connection(read_only=False)
    initialise_db(db)

    raw_transactions = []
    try:
        for tx in scenario:
            if tx[0] != 'create_next_block':
                # Each scenario entry names a message module; compose and
                # construct the corresponding unsigned transaction.
                module = sys.modules['counterpartylib.lib.messages.{}'.format(tx[0])]
                compose = getattr(module, 'compose')
                unsigned_tx_hex = transaction.construct(db, compose(db, *tx[1]), **tx[2])
                raw_transactions.append({tx[0]: unsigned_tx_hex})
                insert_raw_transaction(unsigned_tx_hex, db, rawtransactions_db)
            else:
                create_next_block(db, block_index=config.BURN_START + tx[1], parse_block=True)

        dump = dump_database(db)
        log = logger_buff.getvalue()
    finally:
        # BUG FIX: the capture handler was never detached, so each call
        # leaked a handler on the root logger and later runs wrote into
        # stale buffers. Also close the db and restore the checkpoints
        # even when a scenario step raises.
        logger.removeHandler(handler)
        db.close()
        check.CHECKPOINTS_TESTNET = checkpoints
    return dump, log, json.dumps(raw_transactions, indent=4)
def test_check_database_version():
    """Matching DB/config versions pass `check.database_version`; a minor
    bump raises DatabaseVersionError with no reparse block, a major bump
    raises it with reparse from the first block."""
    server.initialise(database_file=tempfile.gettempdir() + '/fixtures.unittest.db',
                      testnet=True, **util_test.COUNTERPARTYD_OPTIONS)
    util_test.restore_database(config.DATABASE,
                               CURR_DIR + '/fixtures/scenarios/unittest_fixture.sql')
    db = database.get_connection(read_only=False)

    database.update_version(db)
    version_major, version_minor = database.version(db)
    assert config.VERSION_MAJOR == version_major
    assert config.VERSION_MINOR == version_minor
    check.database_version(db)

    try:
        # Minor-version mismatch: error, but no reparse required.
        config.VERSION_MINOR += 1
        with pytest.raises(check.DatabaseVersionError) as exception:
            check.database_version(db)
        # BUG FIX: `== None` replaced with the idiomatic `is None`.
        assert exception.value.reparse_block_index is None

        # Major-version mismatch: reparse from the first block.
        config.VERSION_MAJOR += 1
        with pytest.raises(check.DatabaseVersionError) as exception:
            check.database_version(db)
        assert exception.value.reparse_block_index == config.BLOCK_FIRST
    finally:
        # BUG FIX: the version bumps were never undone, polluting global
        # config for every test that runs afterwards.
        config.VERSION_MINOR -= 1
        config.VERSION_MAJOR -= 1
def initialise_rawtransactions_db(db):
    """Drop the old raw-transactions table, create a new one and populate it
    from the unspent_outputs.json fixture.

    Args:
        db: writable database connection; its ``raw_transactions`` table is
            rebuilt from scratch.
    """
    if pytest.config.option.savescenarios:
        server.initialise(database_file=':memory:', testnet=True, **COUNTERPARTYD_OPTIONS)

    cursor = db.cursor()
    cursor.execute('DROP TABLE IF EXISTS raw_transactions')
    cursor.execute('CREATE TABLE IF NOT EXISTS raw_transactions(tx_hash TEXT UNIQUE, tx_hex TEXT)')

    with open(CURR_DIR + '/fixtures/unspent_outputs.json', 'r') as listunspent_test_file:
        wallet_unspent = json.load(listunspent_test_file)
    for output in wallet_unspent:
        # lx() reverses the displayed txid into internal byte order.
        txid = binascii.hexlify(bitcoinlib.core.lx(output['txid'])).decode()
        # BUG FIX: the result was bound to an unused local `tx`; keep the
        # call only as a sanity check that the fixture hex deserializes.
        backend.deserialize(output['txhex'])
        cursor.execute('INSERT INTO raw_transactions VALUES (?, ?)', (txid, output['txhex']))
    cursor.close()
def reparse(testnet=True):
    """Reparse all transaction from the database, create a new blockchain and compare it to the old one.

    Copies the production database into an in-memory one, drops the derived
    tables, re-runs block parsing, and checks the regenerated consensus
    hashes; on a mismatch, dumps a diff of the offending block's data.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)
    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Log re-parse progress to the console at INFO level.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = database.get_connection(read_only=False)
    initialise_db(memory_db)

    # Locate the production database for this network and copy it wholesale
    # into the in-memory database via the backup API.
    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # Here we don’t use block.reparse() because it reparse db in transaction (`with db`).
    memory_cursor = memory_db.cursor()
    # Drop all derived tables so parsing rebuilds them from scratch.
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(list(memory_cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))))
        assert block_exists

    # Clean consensus hashes if first block hash don’t match with checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [column['name'] for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(field)
            first_hash = list(memory_cursor.execute(sql, (config.BLOCK_FIRST,)))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                # NULL the whole column so every block's hash is recomputed.
                memory_cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field))

    # Reparse every block in order, threading the rolling hashes forward.
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db, block['block_index'], block['block_time'],
                previous_ledger_hash=previous_ledger_hash, ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash, txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info('Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' % (
                block['block_index'], previous_ledger_hash[-5:], previous_txlist_hash[-5:], previous_messages_hash[-5:],
                (' [overwrote %s]' % previous_found_messages_hash) if previous_found_messages_hash and previous_found_messages_hash != previous_messages_hash else ''))
        except check.ConsensusError as e:
            # On a consensus mismatch, diff the offending block's data
            # between the reparsed (memory) and original (prod) databases
            # before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise(e)
def reparse(testnet=True):
    """Reparse all transaction from the database, create a new blockchain and compare it to the old one.

    Copies the production database into an in-memory one, drops the derived
    tables, re-runs block parsing, and checks the regenerated consensus
    hashes; on a mismatch, dumps a diff of the offending block's data.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)
    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Log re-parse progress to the console at INFO level.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = database.get_connection(read_only=False)
    initialise_db(memory_db)

    # Locate the production database for this network and copy it wholesale
    # into the in-memory database via the backup API.
    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True)
    prod_db_path = os.path.join(data_dir, '{}{}.db'.format(config.APP_NAME, '.testnet' if testnet else ''))
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # Here we don’t use block.reparse() because it reparse db in transaction (`with db`).
    memory_cursor = memory_db.cursor()
    # Drop all derived tables so parsing rebuilds them from scratch.
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(list(memory_cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))))
        assert block_exists

    # Clean consensus hashes if first block hash don’t match with checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [column['name'] for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks WHERE block_index = ?'''.format(field)
            first_hash = list(memory_cursor.execute(sql, (config.BLOCK_FIRST,)))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                # NULL the whole column so every block's hash is recomputed.
                memory_cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field))

    # Reparse every block in order, threading the rolling hashes forward.
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            logger.info('Block (re‐parse): {}'.format(str(block['block_index'])))
            util.CURRENT_BLOCK_INDEX = block['block_index']  # TODO: Correct?!
            previous_ledger_hash, previous_txlist_hash = blocks.parse_block(
                memory_db, block['block_index'], block['block_time'],
                previous_ledger_hash, block['ledger_hash'],
                previous_txlist_hash, block['txlist_hash'])
        except check.ConsensusError as e:
            # On a consensus mismatch, diff the offending block's data
            # between the reparsed (memory) and original (prod) databases
            # before re-raising.
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise (e)
if __name__ == "__main__":
    # Imports kept in their original order (some have import-time side
    # effects, e.g. `options` parses the command line), one per line.
    import options
    import sys
    import server
    import logging
    import db
    import os.path

    logging.info("Starting %s.", server.server_name())
    logging.debug("Command logging is %sabled.",
                  "en" if options.args.log_commands else "dis")

    from twisted.internet import reactor

    # Seed an empty JSON database file on first run.
    dump_file = options.args.dump_file
    if not os.path.isfile(dump_file):
        logging.info("Creating empty database.")
        with open(dump_file, "w") as f:
            f.write("{}")

    db.load()
    server.port = server.initialise()
    reactor.run()