Example #1
0
 def run(self):
     """Thread entry point: configure chancecoind, connect to the
     database and follow the blockchain.

     Fix: the bare ``except: pass`` silently swallowed *every*
     exception (including SystemExit/KeyboardInterrupt), hiding all
     failures.  Catch only ``Exception`` and report the traceback.
     """
     try:
         chancecoind.set_options()
         db = util.connect_to_db()
         blocks.follow(db)
     except Exception:
         # Report the failure instead of silently discarding it.
         import traceback
         traceback.print_exc()
Example #2
0
 def run(self):
     """Thread entry point: configure chancecoind, connect to the
     database and follow the blockchain.

     Fix: the bare ``except: pass`` silently swallowed *every*
     exception (including SystemExit/KeyboardInterrupt), hiding all
     failures.  Catch only ``Exception`` and report the traceback.
     """
     try:
         chancecoind.set_options()
         db = util.connect_to_db()
         blocks.follow(db)
     except Exception:
         # Report the failure instead of silently discarding it.
         import traceback
         traceback.print_exc()
Example #3
0
def do_book(testnet):
    """Rebuild the credit/debit 'book' from a reparsed temporary copy
    of the counterpartyd database and compare it to the stored book.
    """
    # Pick the book file for the requested network.
    filename = 'book.testnet' if testnet else 'book.mainnet'
    old = CURR_DIR + '/' + filename
    new = old + '.new'

    # Last block_index recorded in the old book (columns 7-13 of the
    # final line).
    with open(old, 'r') as book_file:
        block_index = int(book_file.readlines()[-1][7:13])

    # Work against a temporary copy of the configured database.
    counterpartyd.set_options(testnet=testnet, carefulness=60)
    default_db = config.DATABASE
    temp_db = tempfile.gettempdir() + '/' + os.path.basename(config.DATABASE)
    shutil.copyfile(default_db, temp_db)
    counterpartyd.set_options(database_file=temp_db,
                              testnet=testnet,
                              carefulness=60)
    db = util.connect_to_db()
    cursor = db.cursor()

    # TODO: USE API
    # Reparse the temporary database through the CLI.
    import subprocess
    command = ['counterpartyd.py', '--database-file=' + temp_db]
    if testnet:
        command.extend(['--testnet', '--force'])
    command.extend(['--carefulness=60', 'reparse'])
    subprocess.check_call(command)

    # Write the new book from the reparsed database.
    with open(new, 'w') as book_file:
        for label, query in (
                ('credit', 'select * from credits where block_index <= ? order by block_index, address, asset'),
                ('debit', 'select * from debits where block_index <= ? order by block_index, address, asset')):
            cursor.execute(query, (block_index,))
            for row in cursor.fetchall():
                book_file.write(label + ' ' + str(summarise(row)) + '\n')

    # Compare the new book against the stored one.
    compare(filename)

    # Clean up.
    cursor.close()
    os.remove(temp_db)
Example #4
0
def reparse(testnet=True):
    """Replay the production database in memory and verify consensus
    hashes block by block.

    Snapshots the on-disk production DB into an in-memory DB, drops the
    derived tables, then reparses every block.  On a ConsensusError the
    mismatching ledger/txlist is diffed against production before the
    error propagates.

    Fix: use a bare ``raise`` (instead of ``raise (e)``) so the
    original traceback is preserved exactly.
    NOTE(review): a StreamHandler is added to the root logger on every
    call — repeated calls will duplicate log output.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    options.pop('data_dir')
    counterpartyd.set_options(database_file=':memory:',
                              testnet=testnet,
                              **options)

    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Console logging at INFO level.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = util.connect_to_db()
    initialise_db(memory_db)

    # Path of the on-disk production database for this network.
    prod_db_path = os.path.join(
        config.DATA_DIR, '{}.{}{}.db'.format(config.XCP_CLIENT,
                                             str(config.VERSION_MAJOR),
                                             '.testnet' if testnet else ''))
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(util.rowtracer)

    # Snapshot production into the in-memory database.
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # here we don't use block.reparse() because it reparse db in transaction (`with db`)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            logger.info('Block (re‐parse): {}'.format(str(
                block['block_index'])))
            previous_ledger_hash, previous_txlist_hash = blocks.parse_block(
                memory_db, block['block_index'], block['block_time'],
                previous_ledger_hash, block['ledger_hash'],
                previous_txlist_hash, block['txlist_hash'])
        except ConsensusError as e:
            message = str(e)
            # Show a diff of the offending block before re-raising.
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            # Bare raise preserves the original traceback.
            raise
Example #5
0
def reparse(testnet=True):
    """Replay the production bluejudyd database in memory and check
    consensus hashes block by block.

    The production DB is snapshotted into an in-memory DB, the derived
    tables are dropped, and every block is reparsed.  On a
    ConsensusError the mismatching ledger/txlist is diffed against
    production before the error is re-raised.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    options.pop('data_dir')
    bluejudyd.set_options(database_file=':memory:', testnet=testnet, **options)

    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Console logging at INFO level.
    # NOTE(review): a handler is added to the root logger on every
    # call; repeated calls duplicate log output.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = util.connect_to_db()
    initialise_db(memory_db)

    # Path of the on-disk production database for this network.
    prod_db_path = os.path.join(config.DATA_DIR, '{}.{}{}.db'.format(config.XBJ_CLIENT, str(config.VERSION_MAJOR), '.testnet' if testnet else ''))
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(util.rowtracer)

    # Copy the production DB into the in-memory DB.
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # here we don't use block.reparse() because it reparse db in transaction (`with db`)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            logger.info('Block (re‐parse): {}'.format(str(block['block_index'])))
            # parse_block returns the new running consensus hashes.
            previous_ledger_hash, previous_txlist_hash = blocks.parse_block(memory_db, block['block_index'], block['block_time'], 
                                                                                    previous_ledger_hash, block['ledger_hash'],
                                                                                    previous_txlist_hash, block['txlist_hash'])
        except ConsensusError as e:
            message = str(e)
            # Show a diff of the offending block before re-raising.
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise(e)
def setup_function(function):
    """Pytest per-test setup: configure counterpartyd for unit tests,
    reset the database file, and (re)initialise the module-level `db`.

    Fix: the bare ``except:`` around ``os.remove`` swallowed *every*
    exception; only a failed file removal (OSError) is expected and
    ignorable here.
    """
    counterpartyd.set_options(rpc_port=9999, data_dir=tempfile.gettempdir(), database_file=tempfile.gettempdir()+'/counterpartyd.unittest.db', 
                              rpc_password='******', backend_rpc_password='******',
                              testnet=True, testcoin=False, backend_rpc_ssl_verify=False)
    # Start each test from a fresh database; a missing file is fine.
    try:
        os.remove(config.DATABASE)
    except OSError:
        pass

    # Connect to database.
    global db
    db = util.connect_to_db(foreign_keys=False)
    from lib import blocks
    blocks.initialise(db)
Example #7
0
def do_book(testnet):
    """Rebuild the credit/debit 'book' from a reparsed temporary copy
    of the clearinghoused database and compare it to the stored book.
    """
    # Filenames.
    if testnet:
        filename = 'book.testnet'
    else:
        filename = 'book.mainnet'
    old = CURR_DIR + '/' + filename
    new = old + '.new'

    # Get last block_index of old book.
    # The block index is read from columns 7-13 of the final line.
    with open(old, 'r') as f:
        block_index = int(f.readlines()[-1][7:13])

    # Use temporary DB.
    clearinghoused.set_options(testnet=testnet)
    default_db = config.DATABASE
    temp_db = tempfile.gettempdir() + '/' + os.path.basename(config.DATABASE)
    shutil.copyfile(default_db, temp_db)
    # Re-point configuration at the temporary copy.
    clearinghoused.set_options(database_file=temp_db, testnet=testnet)
    db = util.connect_to_db()
    cursor = db.cursor()

    # TODO: USE API
    # Reparse the temporary database through the CLI.
    import subprocess
    if testnet:
        subprocess.check_call(['./clearinghoused.py', '--database-file=' + temp_db, '--testnet', 'reparse'])
    else:
        subprocess.check_call(['./clearinghoused.py', '--database-file=' + temp_db, 'reparse'])

    # Get new book.
    with open(new, 'w') as f:
        # Credits.
        cursor.execute('select * from credits where block_index <= ? order by block_index, address, asset', (block_index,))
        for credit in list(cursor):
            f.write('credit ' + str(summarise(credit)) + '\n')
        # Debits.
        cursor.execute('select * from debits where block_index <= ? order by block_index, address, asset', (block_index,))
        for debit in cursor.fetchall():
            f.write('debit ' + str(summarise(debit)) + '\n')

    # Compare books.
    compare(filename)

    # Clean up.
    cursor.close()
    os.remove(temp_db)
Example #8
0
def run_scenario(scenario, rawtransactions_db):
    """Execute a test scenario against a fresh in-memory database.

    Each scenario entry is either ('create_next_block', offset) or
    (message_name, compose_args, transaction_kwargs).  Returns
    (database dump, captured log, raw transactions as JSON).

    Fix: config.CHECKPOINTS_TESTNET was cleared but never restored,
    leaking mutated global state into subsequent tests; save and
    restore it, matching the counterpartyd variant of this helper.
    """
    litetokensd.set_options(database_file=':memory:',
                            testnet=True,
                            **COUNTERPARTYD_OPTIONS)
    config.PREFIX = b'TESTXXXX'
    config.FIRST_MULTISIG_BLOCK_TESTNET = 1
    checkpoints = dict(config.CHECKPOINTS_TESTNET)  # saved; restored below
    config.CHECKPOINTS_TESTNET = {}
    # Capture all log output in memory.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logger_buff = io.StringIO()
    handler = logging.StreamHandler(logger_buff)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    asyncio_log = logging.getLogger('asyncio')
    asyncio_log.setLevel(logging.ERROR)

    db = util.connect_to_db()
    initialise_db(db)

    raw_transactions = []
    for transaction in scenario:
        if transaction[0] != 'create_next_block':
            # Compose and record an unsigned transaction of this type.
            module = sys.modules['lib.{}'.format(transaction[0])]
            compose = getattr(module, 'compose')
            unsigned_tx_hex = litecoin.transaction(
                db, compose(db, *transaction[1]), **transaction[2])
            raw_transactions.append({transaction[0]: unsigned_tx_hex})
            insert_raw_transaction(unsigned_tx_hex, db, rawtransactions_db)
        else:
            create_next_block(db,
                              block_index=config.BURN_START + transaction[1],
                              parse_block=True)

    dump = dump_database(db)
    log = logger_buff.getvalue()

    db.close()
    config.CHECKPOINTS_TESTNET = checkpoints  # restore mutated global
    return dump, log, json.dumps(raw_transactions, indent=4)
Example #9
0
def setup_function(function):
    """Pytest per-test setup: configure counterpartyd for unit tests,
    reset the database file, and (re)initialise the module-level `db`.

    Fix: the bare ``except:`` around ``os.remove`` swallowed *every*
    exception; only a failed file removal (OSError) is expected and
    ignorable here.
    """
    counterpartyd.set_options(rpc_port=9999,
                              data_dir=tempfile.gettempdir(),
                              database_file=tempfile.gettempdir() +
                              '/counterpartyd.unittest.db',
                              rpc_password='******',
                              backend_rpc_password='******',
                              testnet=True,
                              testcoin=False,
                              backend_rpc_ssl_verify=False)
    # Start each test from a fresh database; a missing file is fine.
    try:
        os.remove(config.DATABASE)
    except OSError:
        pass

    # Connect to database.
    global db
    db = util.connect_to_db(foreign_keys=False)
    from lib import blocks
    blocks.initialise(db)
Example #10
0
def run_scenario(scenario, rawtransactions_db):
    """Execute a test scenario against a fresh in-memory database.

    Each scenario entry is either ('create_next_block', offset) or
    (message_name, compose_args, transaction_kwargs).  Returns
    (database dump, captured log, raw transactions as JSON).

    Fix: cleanup (db.close() and restoring the mutated
    config.CHECKPOINTS_TESTNET) now runs in a ``finally`` block, so a
    failing scenario no longer leaks an open connection or clobbered
    checkpoints into subsequent tests.
    """
    counterpartyd.set_options(database_file=':memory:', testnet=True, **COUNTERPARTYD_OPTIONS)
    config.PREFIX = b'TESTXXXX'
    util.FIRST_MULTISIG_BLOCK_TESTNET = 1
    checkpoints = dict(config.CHECKPOINTS_TESTNET)  # saved; restored below
    config.CHECKPOINTS_TESTNET = {}
    # Capture all log output in memory.
    # NOTE(review): the handler is never removed from the root logger,
    # so repeated calls accumulate handlers.
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logger_buff = io.StringIO()
    handler = logging.StreamHandler(logger_buff)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    asyncio_log = logging.getLogger('asyncio')
    asyncio_log.setLevel(logging.ERROR)

    db = util.connect_to_db()
    initialise_db(db)

    try:
        raw_transactions = []
        for transaction in scenario:
            if transaction[0] != 'create_next_block':
                # Compose and record an unsigned transaction of this type.
                module = sys.modules['lib.messages.{}'.format(transaction[0])]
                compose = getattr(module, 'compose')
                unsigned_tx_hex = bitcoin.transaction(db, compose(db, *transaction[1]), **transaction[2])
                raw_transactions.append({transaction[0]: unsigned_tx_hex})
                insert_raw_transaction(unsigned_tx_hex, db, rawtransactions_db)
            else:
                create_next_block(db, block_index=config.BURN_START + transaction[1], parse_block=True)

        dump = dump_database(db)
        log = logger_buff.getvalue()
    finally:
        db.close()
        config.CHECKPOINTS_TESTNET = checkpoints  # restore mutated global
    return dump, log, json.dumps(raw_transactions, indent=4)
Example #11
0
                          unittest=True)

# unit tests private keys
config.UNITTEST_PRIVKEY = {
    'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc':
    'cPdUqd5EbBWsjcG9xiL1hz8bEyGFiz4SW99maU9JgpL9TEcxUf3j',
    'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns':
    'cQ897jnCVRrawNbw8hgmjMiRNHejwzg4KbzdMCzc91iaTif8ReqX'
}

# Connect to database.
# Start from a clean slate: drop any database left by a previous run.
# NOTE(review): the bare except silences any failure here, not just a
# missing file — narrowing to OSError would be safer.
try:
    os.remove(config.DATABASE)
except:
    pass
db = util.connect_to_db()
cursor = db.cursor()

# Each tx has a block_index equal to its tx_index
tx_index = 0

# Default fixture values shared by the tests in this module.
source_default = 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'
destination_default = 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'
quantity = config.UNIT
small = round(quantity / 2)
expiration = 10
fee_required = 900000
fee_provided = 1000000
fee_multiplier_default = .05
move_random_hash_default = '6a886d74c2d4b1d7a35fd9159333ef64ba45a04d7aeeeb4538f958603c16fc5d'
rps_random_default = '7a4488d61ed8f2e9fa2874113fccb8b1'
def counterpartyd_db(request):
    """Fixture: return a DB connection inside an open transaction that
    is rolled back when the requesting test finishes."""
    connection = util.connect_to_db()
    txn_cursor = connection.cursor()
    txn_cursor.execute('''BEGIN''')

    def _rollback():
        # Undo everything the test wrote.
        txn_cursor.execute('''ROLLBACK''')

    request.addfinalizer(_rollback)
    return connection
                bitcoind_rpc_connect=args.bitcoind_rpc_connect, bitcoind_rpc_port=args.bitcoind_rpc_port,
                bitcoind_rpc_user=args.bitcoind_rpc_user, bitcoind_rpc_password=args.bitcoind_rpc_password,
                insight_enable=args.insight_enable, insight_connect=args.insight_connect, insight_port=args.insight_port,
                rpc_host=args.rpc_host, rpc_port=args.rpc_port, rpc_user=args.rpc_user, rpc_password=args.rpc_password,
                log_file=args.log_file, pid_file=args.pid_file,
                api_num_threads=args.api_num_threads, api_request_queue_size=args.api_request_queue_size,
                database_file=args.database_file, testnet=args.testnet, testcoin=args.testcoin, unittest=False, careful=args.careful, no_interaction=args.no_interaction)

    #Create/update pid file
    pid = str(os.getpid())
    pidf = open(config.PID, 'w')
    pidf.write(pid)
    pidf.close()    

    # Database
    db = util.connect_to_db()

    # Logging (to file and console).
    logger = logging.getLogger() #get root logger
    logger.setLevel(logging.DEBUG if args.verbose else logging.INFO)
    #Console logging
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG if args.verbose else logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)
    #File logging (rotated)
    max_log_size = 2 * 1024 * 1024 #max log size of 2 MB before rotation (make configurable later)
    if os.name == 'nt':
        fileh = util_windows.SanitizedRotatingFileHandler(config.LOG, maxBytes=max_log_size, backupCount=5)
    else:
Example #14
0
def reparse(testnet=True):
    """Replay the production database in memory and verify consensus
    hashes block by block.

    Snapshots the on-disk production DB into an in-memory DB, drops the
    derived tables, clears stored consensus hashes when the first
    block's hash no longer matches the configured checkpoint, and then
    reparses every block.  On a ConsensusError the mismatching
    ledger/txlist is diffed against production before re-raising.
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    options.pop('data_dir')
    counterpartyd.set_options(database_file=':memory:', testnet=testnet, **options)

    if testnet:
        config.PREFIX = b'TESTXXXX'

    # Console logging at INFO level.
    logger = logging.getLogger()
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    memory_db = util.connect_to_db()
    initialise_db(memory_db)

    # Path of the on-disk production database for this network.
    prod_db_path = os.path.join(config.DATA_DIR, '{}.{}{}.db'.format(config.XCP_CLIENT, str(config.VERSION_MAJOR), '.testnet' if testnet else ''))
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(util.rowtracer)

    # Copy the production DB into the in-memory DB.
    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # here we don't use block.reparse() because it reparse db in transaction (`with db`)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # clean consensus hashes if first block hash don't match with checkpoint.
    checkpoints = config.CHECKPOINTS_TESTNET if config.TESTNET else config.CHECKPOINTS_MAINNET
    columns = [column['name'] for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks  WHERE block_index = ?'''.format(field)
            first_hash = list(memory_cursor.execute(sql, (config.BLOCK_FIRST,)))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logging.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute('''UPDATE blocks SET {} = NULL'''.format(field))

    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            logger.info('Block (re‐parse): {}'.format(str(block['block_index'])))
            # parse_block returns the new running consensus hashes.
            previous_ledger_hash, previous_txlist_hash = blocks.parse_block(memory_db, block['block_index'], block['block_time'],
                                                                                    previous_ledger_hash, block['ledger_hash'],
                                                                                    previous_txlist_hash, block['txlist_hash'])
        except blocks.ConsensusError as e:
            message = str(e)
            # Show a diff of the offending block before re-raising.
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise(e)