def insert_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database."""
    cursor = db.cursor()

    # one transaction per block
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    tx_hash = dummy_tx_hash(raw_transaction)
    tx = None
    tx_index = block_index - config.BURN_START + 1
    try:
        source, destination, btc_amount, fee, data, extra = blocks._get_tx_info(raw_transaction)
        transaction = (tx_index, tx_hash, block_index, block_hash, block_time,
                       source, destination, btc_amount, fee, data, True)
        cursor.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction)
        tx = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0]
    except exceptions.BTCOnlyError:
        pass

    cursor.close()

    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=1)

    util.CURRENT_BLOCK_INDEX = block_index
    blocks.parse_block(db, block_index, block_time)

    return tx_hash, tx
def insert_unconfirmed_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database."""
    # one transaction per block
    cursor = db.cursor()

    tx_hash = dummy_tx_hash(raw_transaction)

    # this isn't really correct, but it will do
    tx_index = list(cursor.execute('''SELECT tx_index FROM transactions ORDER BY tx_index DESC LIMIT 1'''))
    tx_index = tx_index[0]['tx_index'] if len(tx_index) else 0
    tx_index = tx_index + 1

    source, destination, btc_amount, fee, data = blocks._get_tx_info(raw_transaction)
    tx = {
        'tx_index': tx_index,
        'tx_hash': tx_hash,
        'block_index': config.MEMPOOL_BLOCK_INDEX,
        'block_hash': config.MEMPOOL_BLOCK_HASH,
        'block_time': int(time.time()),
        'source': source,
        'destination': destination,
        'btc_amount': btc_amount,
        'fee': fee,
        'data': data,
        'supported': True
    }

    cursor.close()

    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=0)

    return tx
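# Both fixtures above rely on a `dummy_tx_hash` helper that is not shown in this
# section. A minimal sketch of such a helper is given below, assuming all it has
# to provide is a deterministic, unique 64-character hex id for a raw
# transaction; the real test-suite helper may differ.
import hashlib

def dummy_tx_hash_sketch(raw_transaction):
    """Hypothetical stand-in: derive a stable fake txid from the raw hex string."""
    return hashlib.sha256(raw_transaction.encode('utf-8')).hexdigest()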
def insert_raw_transaction(raw_transaction, db, rawtransactions_db):
    """Add a raw transaction to the database."""
    # one transaction per block
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    cursor = db.cursor()
    tx_index = block_index - config.BURN_START + 1
    tx_hash = hashlib.sha256('{}{}'.format(tx_index, raw_transaction).encode('utf-8')).hexdigest()
    # print(tx_hash)

    # Remember to add it to the log dump
    if pytest.config.option.savescenarios:
        save_rawtransaction(rawtransactions_db, tx_hash, raw_transaction)

    source, destination, btc_amount, fee, data = blocks._get_tx_info(raw_transaction)
    transaction = (tx_index, tx_hash, block_index, block_hash, block_time,
                   source, destination, btc_amount, fee, data, True)
    cursor.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction)
    tx = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0]
    cursor.close()

    util.CURRENT_BLOCK_INDEX = block_index  # TODO: Correct?!
    blocks.parse_block(db, block_index, block_time)

    return tx
def insert_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database."""
    cursor = db.cursor()

    # one transaction per block
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    tx_hash = dummy_tx_hash(raw_transaction)
    tx_index = block_index - config.BURN_START + 1

    source, destination, btc_amount, fee, data = blocks._get_tx_info(raw_transaction)
    transaction = (tx_index, tx_hash, block_index, block_hash, block_time,
                   source, destination, btc_amount, fee, data, True)
    cursor.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction)
    tx = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0]
    cursor.close()

    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=1)

    util.CURRENT_BLOCK_INDEX = block_index
    blocks.parse_block(db, block_index, block_time)

    return tx
def insert_unconfirmed_raw_transaction(raw_transaction, db):
    """Add a raw transaction to the database."""
    # one transaction per block
    cursor = db.cursor()

    tx_hash = dummy_tx_hash(raw_transaction)

    # this isn't really correct, but it will do
    tx_index = list(cursor.execute('''SELECT tx_index FROM transactions ORDER BY tx_index DESC LIMIT 1'''))
    tx_index = tx_index[0]['tx_index'] if len(tx_index) else 0
    tx_index = tx_index + 1

    source, destination, btc_amount, fee, data, extra = blocks._get_tx_info(raw_transaction)
    tx = {
        'tx_index': tx_index,
        'tx_hash': tx_hash,
        'block_index': config.MEMPOOL_BLOCK_INDEX,
        'block_hash': config.MEMPOOL_BLOCK_HASH,
        'block_time': int(time.time()),
        'source': source,
        'destination': destination,
        'btc_amount': btc_amount,
        'fee': fee,
        'data': data,
        'supported': True
    }

    cursor.close()

    MOCK_UTXO_SET.add_raw_transaction(raw_transaction, tx_id=tx_hash, confirmations=0)

    return tx
def insert_raw_transaction(raw_transaction, db, rawtransactions_db):
    """Add a raw transaction to the database."""
    # one transaction per block
    block_index, block_hash, block_time = create_next_block(db, parse_block=False)

    cursor = db.cursor()
    tx_index = block_index - config.BURN_START + 1
    tx_hash = hashlib.sha256('{}{}'.format(tx_index, raw_transaction).encode('utf-8')).hexdigest()
    # print(tx_hash)

    # Remember to add it to the log dump
    if pytest.config.option.savescenarios:
        save_rawtransaction(rawtransactions_db, tx_hash, raw_transaction)

    source, destination, btc_amount, fee, data = blocks._get_tx_info(raw_transaction)
    transaction = (tx_index, tx_hash, block_index, block_hash, block_time,
                   source, destination, btc_amount, fee, data, True)
    cursor.execute('''INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?,?,?)''', transaction)
    tx = list(cursor.execute('''SELECT * FROM transactions WHERE tx_index = ?''', (tx_index,)))[0]
    cursor.close()

    util.CURRENT_BLOCK_INDEX = block_index  # TODO: Correct?!
    blocks.parse_block(db, block_index, block_time)

    return tx
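# The "one transaction per block" convention used by the fixtures above means
# the tx_index is derived directly from the block height:
# tx_index = block_index - config.BURN_START + 1. A small self-contained check
# of that arithmetic (the BURN_START value below is illustrative only; the real
# value comes from counterpartylib's config):
def _tx_index_for_block(block_index, burn_start=310000):
    """Sketch of the block-height to tx_index mapping used by the fixtures above."""
    return block_index - burn_start + 1

assert _tx_index_for_block(310000) == 1    # a block at the burn-start height maps to index 1
assert _tx_index_for_block(310499) == 500  # each later block advances the index by one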
def construct (db, tx_info, encoding='auto', fee_per_kb=config.DEFAULT_FEE_PER_KB, estimate_fee_per_kb=None, estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_NBLOCKS, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, exact_fee=None, fee_provided=0, provided_pubkeys=None, dust_return_pubkey=None, allow_unconfirmed_inputs=False, unspent_tx_hash=None, custom_inputs=None, disable_utxo_locks=False): if estimate_fee_per_kb is None: estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB global UTXO_LOCKS desired_encoding = encoding (source, destination_outputs, data) = tx_info if dust_return_pubkey: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey) # Source. # If public key is necessary for construction of (unsigned) # transaction, use the public key provided, or find it from the # blockchain. if source: script.validate(source) source_is_p2sh = script.is_p2sh(source) # Sanity checks. if exact_fee and not isinstance(exact_fee, int): raise exceptions.TransactionError('Exact fees must be in satoshis.') if not isinstance(fee_provided, int): raise exceptions.TransactionError('Fee provided must be in satoshis.') if UTXO_LOCKS is None and config.UTXO_LOCKS_MAX_ADDRESSES > 0: # initialize if configured UTXO_LOCKS = util.DictCache(size=config.UTXO_LOCKS_MAX_ADDRESSES) '''Destinations''' # Destination outputs. # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the # destination output isn’t a dust output. Set null values to dust size. destination_outputs_new = [] for (address, value) in destination_outputs: # Value. if script.is_multisig(address): dust_size = multisig_dust_size else: dust_size = regular_dust_size if value == None: value = dust_size elif value < dust_size: raise exceptions.TransactionError('Destination output is dust.') # Address. script.validate(address) if script.is_multisig(address): destination_outputs_new.append((backend.multisig_pubkeyhashes_to_pubkeys(address, provided_pubkeys), value)) else: destination_outputs_new.append((address, value)) destination_outputs = destination_outputs_new destination_btc_out = sum([value for address, value in destination_outputs]) '''Data''' if data: # Data encoding methods (choose and validate). if encoding == 'auto': if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE: encoding = 'opreturn' else: encoding = 'multisig' elif encoding not in ('pubkeyhash', 'multisig', 'opreturn'): raise exceptions.TransactionError('Unknown encoding‐scheme.') if encoding == 'multisig': # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node # the default for the node is optional so could fail if (source_is_p2sh and dust_return_pubkey is None) or (dust_return_pubkey is False and config.P2SH_DUST_RETURN_PUBKEY is None): raise exceptions.TransactionError("Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided.") elif dust_return_pubkey is False: dust_return_pubkey = binascii.unhexlify(config.P2SH_DUST_RETURN_PUBKEY) # Divide data into chunks. if encoding == 'pubkeyhash': # Prefix is also a suffix here. chunk_size = 20 - 1 - 8 elif encoding == 'multisig': # Two pubkeys, minus length byte, minus prefix, minus two nonces, # minus two sign bytes. 
chunk_size = (33 * 2) - 1 - 8 - 2 - 2 elif encoding == 'opreturn': chunk_size = config.OP_RETURN_MAX_SIZE if len(data) + len(config.PREFIX) > chunk_size: raise exceptions.TransactionError('One `OP_RETURN` output per transaction.') data_array = list(chunks(data, chunk_size)) # Data outputs. if encoding == 'multisig': data_value = multisig_dust_size elif encoding == 'opreturn': data_value = op_return_value else: # Pay‐to‐PubKeyHash, e.g. data_value = regular_dust_size data_output = (data_array, data_value) if not dust_return_pubkey: if encoding == 'multisig': dust_return_pubkey = get_dust_return_pubkey(source, provided_pubkeys, encoding) else: dust_return_pubkey = None else: data_array = [] data_output = None dust_return_pubkey = None data_btc_out = sum([data_value for data_chunk in data_array]) '''Inputs''' # Calculate collective size of outputs, for fee calculation. p2pkhsize = 25 + 9 if encoding == 'multisig': data_output_size = 81 # 71 for the data elif encoding == 'opreturn': data_output_size = 90 # 80 for the data else: data_output_size = p2pkhsize # Pay‐to‐PubKeyHash (25 for the data?) outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) * data_output_size) # Get inputs. multisig_inputs = not data use_inputs = custom_inputs # Array of UTXOs, as retrieved by listunspent function from bitcoind if custom_inputs is None: if unspent_tx_hash is not None: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, multisig_inputs=multisig_inputs) else: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs, multisig_inputs=multisig_inputs) # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time if UTXO_LOCKS is not None and source in UTXO_LOCKS: unspentkeys = {make_outkey(output) for output in unspent} filtered_unspentkeys = unspentkeys - UTXO_LOCKS[source].keys() unspent = [output for output in unspent if make_outkey(output) in filtered_unspentkeys] unspent = backend.sort_unspent_txouts(unspent) logger.debug('Sorted candidate UTXOs: {}'.format([print_coin(coin) for coin in unspent])) use_inputs = unspent # use backend estimated fee_per_kb if estimate_fee_per_kb: estimated_fee_per_kb = backend.fee_per_kb(estimate_fee_per_kb_nblocks) if estimated_fee_per_kb is not None: fee_per_kb = max(estimated_fee_per_kb, fee_per_kb) # never drop below the default fee_per_kb logger.debug('Fee/KB {:.8f}'.format(fee_per_kb / config.UNIT)) inputs = [] btc_in = 0 change_quantity = 0 sufficient_funds = False final_fee = fee_per_kb desired_input_count = 1 if encoding == 'multisig' and data_array and util.enabled('bytespersigop'): desired_input_count = len(data_array) * 2 for coin in use_inputs: logger.debug('New input: {}'.format(print_coin(coin))) inputs.append(coin) btc_in += round(coin['amount'] * config.UNIT) size = 181 * len(inputs) + outputs_size + 10 necessary_fee = int(size / 1000 * fee_per_kb) # If exact fee is specified, use that. Otherwise, calculate size of tx # and base fee on that (plus provide a minimum fee for selling BTC). if exact_fee: final_fee = exact_fee else: final_fee = max(fee_provided, necessary_fee) # Check if good. btc_out = destination_btc_out + data_btc_out change_quantity = btc_in - (btc_out + final_fee) logger.debug('Size: {} Fee: {:.8f} Change quantity: {:.8f} BTC'.format(size, final_fee / config.UNIT, change_quantity / config.UNIT)) # If change is necessary, must not be a dust output. 
if change_quantity == 0 or change_quantity >= regular_dust_size: sufficient_funds = True if len(inputs) >= desired_input_count: break if not sufficient_funds: # Approximate needed change, fee by with most recently calculated # quantities. btc_out = destination_btc_out + data_btc_out total_btc_out = btc_out + max(change_quantity, 0) + final_fee raise exceptions.BalanceError('Insufficient {} at address {}. (Need approximately {} {}.) To spend unconfirmed coins, use the flag `--unconfirmed`. (Unconfirmed coins cannot be spent from multi‐sig addresses.)'.format(config.BTC, source, total_btc_out / config.UNIT, config.BTC)) # Lock the source's inputs (UTXOs) chosen for this transaction if UTXO_LOCKS is not None and not disable_utxo_locks: if source not in UTXO_LOCKS: UTXO_LOCKS[source] = cachetools.TTLCache( UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE) for input in inputs: UTXO_LOCKS[source][make_outkey(input)] = input logger.debug("UTXO locks: Potentials ({}): {}, Used: {}, locked UTXOs: {}".format( len(unspent), [make_outkey(coin) for coin in unspent], [make_outkey(input) for input in inputs], list(UTXO_LOCKS[source].keys()))) '''Finish''' # Change output. if change_quantity: if script.is_multisig(source): change_address = backend.multisig_pubkeyhashes_to_pubkeys(source, provided_pubkeys) else: change_address = source change_output = (change_address, change_quantity) else: change_output = None # in bitcoin core v0.12.1 a -bytespersigop was added that messes with bare multisig transactions, # as a safeguard we fall back to pubkeyhash encoding when unsure # when len(inputs) > len(data_outputs) there's more bytes:sigops ratio and we can safely continue if encoding == 'multisig' and inputs and data_output and len(inputs) < len(data_array) * 2 and util.enabled('bytespersigop'): # if auto encoding we can do pubkeyhash encoding instead if desired_encoding == 'auto': return construct(db, tx_info, encoding='pubkeyhash', fee_per_kb=fee_per_kb, regular_dust_size=regular_dust_size, multisig_dust_size=multisig_dust_size, op_return_value=op_return_value, exact_fee=exact_fee, fee_provided=fee_provided, provided_pubkeys=provided_pubkeys, allow_unconfirmed_inputs=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, custom_inputs=custom_inputs) # otherwise raise exception else: raise exceptions.EncodingError("multisig will be rejected by Bitcoin Core >= v0.12.1, you should use `encoding=auto` or `encoding=pubkeyhash`") # Serialise inputs and outputs. unsigned_tx = serialise(encoding, inputs, destination_outputs, data_output, change_output, dust_return_pubkey=dust_return_pubkey) unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8') '''Sanity Check''' from counterpartylib.lib import blocks # Desired transaction info. (desired_source, desired_destination_outputs, desired_data) = tx_info desired_source = script.make_canonical(desired_source) desired_destination = script.make_canonical(desired_destination_outputs[0][0]) if desired_destination_outputs else '' # NOTE: Include change in destinations for BTC transactions. # if change_output and not desired_data and desired_destination != config.UNSPENDABLE: # if desired_destination == '': # desired_destination = desired_source # else: # desired_destination += '-{}'.format(desired_source) # NOTE if desired_data == None: desired_data = b'' # Parsed transaction info. try: parsed_source, parsed_destination, x, y, parsed_data = blocks._get_tx_info(unsigned_tx_hex) except exceptions.BTCOnlyError: # Skip BTC‐only transactions. 
return unsigned_tx_hex desired_source = script.make_canonical(desired_source) # Check desired info against parsed info. desired = (desired_source, desired_destination, desired_data) parsed = (parsed_source, parsed_destination, parsed_data) if desired != parsed: # Unlock (revert) UTXO locks if UTXO_LOCKS is not None: for input in inputs: UTXO_LOCKS[source].pop(make_outkey(input), None) raise exceptions.TransactionError('Constructed transaction does not parse correctly: {} ≠ {}'.format(desired, parsed)) return unsigned_tx_hex
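# The data-encoding branch of construct() above splits `data` into fixed-size
# pieces with a chunks() helper that is not shown in this section. A minimal
# sketch of that splitting, together with the chunk sizes the code derives in
# its comments ((33 * 2) - 1 - 8 - 2 - 2 = 53 bytes per bare-multisig output,
# 20 - 1 - 8 = 11 bytes per pubkeyhash output). The slicer below is an assumed
# equivalent, not the library's own implementation:
def chunks_sketch(data, chunk_size):
    """Hypothetical equivalent of chunks(): plain fixed-width slices."""
    return [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]

MULTISIG_CHUNK = (33 * 2) - 1 - 8 - 2 - 2   # 53 payload bytes per multisig data output
PUBKEYHASH_CHUNK = 20 - 1 - 8               # 11 payload bytes per pubkeyhash data output

assert MULTISIG_CHUNK == 53 and PUBKEYHASH_CHUNK == 11
assert chunks_sketch(b'x' * 120, MULTISIG_CHUNK) == [b'x' * 53, b'x' * 53, b'x' * 14]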
def construct(db, tx_info, encoding='auto', fee_per_kb=config.DEFAULT_FEE_PER_KB, estimate_fee_per_kb=None, estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_NBLOCKS, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, exact_fee=None, fee_provided=0, provided_pubkeys=None, dust_return_pubkey=None, allow_unconfirmed_inputs=False, unspent_tx_hash=None, custom_inputs=None, disable_utxo_locks=False): if estimate_fee_per_kb is None: estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB global UTXO_LOCKS desired_encoding = encoding (source, destination_outputs, data) = tx_info if dust_return_pubkey: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey) # Source. # If public key is necessary for construction of (unsigned) # transaction, use the public key provided, or find it from the # blockchain. if source: script.validate(source) source_is_p2sh = script.is_p2sh(source) # Sanity checks. if exact_fee and not isinstance(exact_fee, int): raise exceptions.TransactionError('Exact fees must be in satoshis.') if not isinstance(fee_provided, int): raise exceptions.TransactionError('Fee provided must be in satoshis.') if UTXO_LOCKS is None and config.UTXO_LOCKS_MAX_ADDRESSES > 0: # initialize if configured UTXO_LOCKS = util.DictCache(size=config.UTXO_LOCKS_MAX_ADDRESSES) '''Destinations''' # Destination outputs. # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the # destination output isn’t a dust output. Set null values to dust size. destination_outputs_new = [] for (address, value) in destination_outputs: # Value. if script.is_multisig(address): dust_size = multisig_dust_size else: dust_size = regular_dust_size if value == None: value = dust_size elif value < dust_size: raise exceptions.TransactionError('Destination output is dust.') # Address. script.validate(address) if script.is_multisig(address): destination_outputs_new.append( (backend.multisig_pubkeyhashes_to_pubkeys( address, provided_pubkeys), value)) else: destination_outputs_new.append((address, value)) destination_outputs = destination_outputs_new destination_btc_out = sum( [value for address, value in destination_outputs]) '''Data''' if data: # Data encoding methods (choose and validate). if encoding == 'auto': if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE: encoding = 'opreturn' else: encoding = 'multisig' elif encoding not in ('pubkeyhash', 'multisig', 'opreturn'): raise exceptions.TransactionError('Unknown encoding‐scheme.') if encoding == 'multisig': # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node # the default for the node is optional so could fail if (source_is_p2sh and dust_return_pubkey is None) or ( dust_return_pubkey is False and config.P2SH_DUST_RETURN_PUBKEY is None): raise exceptions.TransactionError( "Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided." ) elif dust_return_pubkey is False: dust_return_pubkey = binascii.unhexlify( config.P2SH_DUST_RETURN_PUBKEY) # Divide data into chunks. if encoding == 'pubkeyhash': # Prefix is also a suffix here. chunk_size = 20 - 1 - 8 elif encoding == 'multisig': # Two pubkeys, minus length byte, minus prefix, minus two nonces, # minus two sign bytes. 
chunk_size = (33 * 2) - 1 - 8 - 2 - 2 elif encoding == 'opreturn': chunk_size = config.OP_RETURN_MAX_SIZE if len(data) + len(config.PREFIX) > chunk_size: raise exceptions.TransactionError( 'One `OP_RETURN` output per transaction.') data_array = list(chunks(data, chunk_size)) # Data outputs. if encoding == 'multisig': data_value = multisig_dust_size elif encoding == 'opreturn': data_value = op_return_value else: # Pay‐to‐PubKeyHash, e.g. data_value = regular_dust_size data_output = (data_array, data_value) if not dust_return_pubkey: if encoding == 'multisig': dust_return_pubkey = get_dust_return_pubkey( source, provided_pubkeys, encoding) else: dust_return_pubkey = None else: data_array = [] data_output = None dust_return_pubkey = None data_btc_out = sum([data_value for data_chunk in data_array]) '''Inputs''' # Calculate collective size of outputs, for fee calculation. p2pkhsize = 25 + 9 if encoding == 'multisig': data_output_size = 81 # 71 for the data elif encoding == 'opreturn': data_output_size = 90 # 80 for the data else: data_output_size = p2pkhsize # Pay‐to‐PubKeyHash (25 for the data?) outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) * data_output_size) # Get inputs. multisig_inputs = not data use_inputs = custom_inputs # Array of UTXOs, as retrieved by listunspent function from bitcoind if custom_inputs is None: if unspent_tx_hash is not None: unspent = backend.get_unspent_txouts( source, unconfirmed=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, multisig_inputs=multisig_inputs) else: unspent = backend.get_unspent_txouts( source, unconfirmed=allow_unconfirmed_inputs, multisig_inputs=multisig_inputs) # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time if UTXO_LOCKS is not None and source in UTXO_LOCKS: unspentkeys = {make_outkey(output) for output in unspent} filtered_unspentkeys = unspentkeys - UTXO_LOCKS[source].keys() unspent = [ output for output in unspent if make_outkey(output) in filtered_unspentkeys ] unspent = backend.sort_unspent_txouts(unspent) logger.debug('Sorted candidate UTXOs: {}'.format( [print_coin(coin) for coin in unspent])) use_inputs = unspent # use backend estimated fee_per_kb if estimate_fee_per_kb: estimated_fee_per_kb = backend.fee_per_kb(estimate_fee_per_kb_nblocks) if estimated_fee_per_kb is not None: fee_per_kb = max( estimated_fee_per_kb, fee_per_kb) # never drop below the default fee_per_kb logger.debug('Fee/KB {:.8f}'.format(fee_per_kb / config.UNIT)) inputs = [] btc_in = 0 change_quantity = 0 sufficient_funds = False final_fee = fee_per_kb desired_input_count = 1 if encoding == 'multisig' and data_array and util.enabled('bytespersigop'): desired_input_count = len(data_array) * 2 for coin in use_inputs: logger.debug('New input: {}'.format(print_coin(coin))) inputs.append(coin) btc_in += round(coin['amount'] * config.UNIT) size = 181 * len(inputs) + outputs_size + 10 necessary_fee = int(size / 1000 * fee_per_kb) # If exact fee is specified, use that. Otherwise, calculate size of tx # and base fee on that (plus provide a minimum fee for selling BTC). if exact_fee: final_fee = exact_fee else: final_fee = max(fee_provided, necessary_fee) # Check if good. btc_out = destination_btc_out + data_btc_out change_quantity = btc_in - (btc_out + final_fee) logger.debug('Size: {} Fee: {:.8f} Change quantity: {:.8f} BTC'.format( size, final_fee / config.UNIT, change_quantity / config.UNIT)) # If change is necessary, must not be a dust output. 
if change_quantity == 0 or change_quantity >= regular_dust_size: sufficient_funds = True if len(inputs) >= desired_input_count: break if not sufficient_funds: # Approximate needed change, fee by with most recently calculated # quantities. btc_out = destination_btc_out + data_btc_out total_btc_out = btc_out + max(change_quantity, 0) + final_fee raise exceptions.BalanceError( 'Insufficient {} at address {}. (Need approximately {} {}.) To spend unconfirmed coins, use the flag `--unconfirmed`. (Unconfirmed coins cannot be spent from multi‐sig addresses.)' .format(config.BTC, source, total_btc_out / config.UNIT, config.BTC)) # Lock the source's inputs (UTXOs) chosen for this transaction if UTXO_LOCKS is not None and not disable_utxo_locks: if source not in UTXO_LOCKS: UTXO_LOCKS[source] = cachetools.TTLCache( UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE) for input in inputs: UTXO_LOCKS[source][make_outkey(input)] = input logger.debug( "UTXO locks: Potentials ({}): {}, Used: {}, locked UTXOs: {}". format(len(unspent), [make_outkey(coin) for coin in unspent], [make_outkey(input) for input in inputs], list(UTXO_LOCKS[source].keys()))) '''Finish''' # Change output. if change_quantity: if script.is_multisig(source): change_address = backend.multisig_pubkeyhashes_to_pubkeys( source, provided_pubkeys) else: change_address = source change_output = (change_address, change_quantity) else: change_output = None # in bitcoin core v0.12.1 a -bytespersigop was added that messes with bare multisig transactions, # as a safeguard we fall back to pubkeyhash encoding when unsure # when len(inputs) > len(data_outputs) there's more bytes:sigops ratio and we can safely continue if encoding == 'multisig' and inputs and data_output and len( inputs) < len(data_array) * 2 and util.enabled('bytespersigop'): # if auto encoding we can do pubkeyhash encoding instead if desired_encoding == 'auto': return construct(db, tx_info, encoding='pubkeyhash', fee_per_kb=fee_per_kb, regular_dust_size=regular_dust_size, multisig_dust_size=multisig_dust_size, op_return_value=op_return_value, exact_fee=exact_fee, fee_provided=fee_provided, provided_pubkeys=provided_pubkeys, allow_unconfirmed_inputs=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, custom_inputs=custom_inputs) # otherwise raise exception else: raise exceptions.EncodingError( "multisig will be rejected by Bitcoin Core >= v0.12.1, you should use `encoding=auto` or `encoding=pubkeyhash`" ) # Serialise inputs and outputs. unsigned_tx = serialise(encoding, inputs, destination_outputs, data_output, change_output, dust_return_pubkey=dust_return_pubkey) unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8') '''Sanity Check''' from counterpartylib.lib import blocks # Desired transaction info. (desired_source, desired_destination_outputs, desired_data) = tx_info desired_source = script.make_canonical(desired_source) desired_destination = script.make_canonical( desired_destination_outputs[0] [0]) if desired_destination_outputs else '' # NOTE: Include change in destinations for BTC transactions. # if change_output and not desired_data and desired_destination != config.UNSPENDABLE: # if desired_destination == '': # desired_destination = desired_source # else: # desired_destination += '-{}'.format(desired_source) # NOTE if desired_data == None: desired_data = b'' # Parsed transaction info. try: parsed_source, parsed_destination, x, y, parsed_data = blocks._get_tx_info( unsigned_tx_hex) except exceptions.BTCOnlyError: # Skip BTC‐only transactions. 
return unsigned_tx_hex desired_source = script.make_canonical(desired_source) # Check desired info against parsed info. desired = (desired_source, desired_destination, desired_data) parsed = (parsed_source, parsed_destination, parsed_data) if desired != parsed: # Unlock (revert) UTXO locks if UTXO_LOCKS is not None: for input in inputs: UTXO_LOCKS[source].pop(make_outkey(input), None) raise exceptions.TransactionError( 'Constructed transaction does not parse correctly: {} ≠ {}'.format( desired, parsed)) return unsigned_tx_hex
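# The coin-selection loop in construct() above estimates the unsigned
# transaction size as 181 bytes per input plus the precomputed output size plus
# a 10-byte overhead, then scales fee_per_kb by that size. A self-contained
# sketch of the same arithmetic (constants copied from the loop; the
# surrounding UTXO-selection logic is omitted):
def estimated_fee_sketch(n_inputs, outputs_size, fee_per_kb):
    """Same size/fee estimate as the selection loop above."""
    size = 181 * n_inputs + outputs_size + 10
    return int(size / 1000 * fee_per_kb)

# e.g. one input, one P2PKH destination (25 + 9 bytes) and one bare-multisig
# data output (81 bytes), at 50000 satoshis per KB:
assert estimated_fee_sketch(1, (25 + 9) + 81, 50000) == 15300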
def construct( db, tx_info, encoding='auto', fee_per_kb=config.DEFAULT_FEE_PER_KB, estimate_fee_per_kb=None, estimate_fee_per_kb_conf_target=config.ESTIMATE_FEE_CONF_TARGET, estimate_fee_per_kb_mode=config.ESTIMATE_FEE_MODE, estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_CONF_TARGET, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, exact_fee=None, fee_provided=0, provided_pubkeys=None, dust_return_pubkey=None, allow_unconfirmed_inputs=False, unspent_tx_hash=None, custom_inputs=None, disable_utxo_locks=False, extended_tx_info=False, old_style_api=None, segwit=False, p2sh_source_multisig_pubkeys=None, p2sh_source_multisig_pubkeys_required=None, p2sh_pretx_txid=None, ): if estimate_fee_per_kb is None: estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB global UTXO_LOCKS, UTXO_P2SH_ENCODING_LOCKS # lazy assign from config, because when set as default it's evaluated before it's configured if old_style_api is None: old_style_api = config.OLD_STYLE_API (source, destination_outputs, data) = tx_info if dust_return_pubkey: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey) if p2sh_source_multisig_pubkeys: p2sh_source_multisig_pubkeys = [ binascii.unhexlify(p) for p in p2sh_source_multisig_pubkeys ] # Source. # If public key is necessary for construction of (unsigned) # transaction, use the public key provided, or find it from the # blockchain. if source: script.validate(source) source_is_p2sh = script.is_p2sh(source) # Normalize source if script.is_multisig(source): source_address = backend.multisig_pubkeyhashes_to_pubkeys( source, provided_pubkeys) else: source_address = source # Sanity checks. if exact_fee and not isinstance(exact_fee, int): raise exceptions.TransactionError('Exact fees must be in satoshis.') if not isinstance(fee_provided, int): raise exceptions.TransactionError('Fee provided must be in satoshis.') '''Determine encoding method''' if data: desired_encoding = encoding # Data encoding methods (choose and validate). if desired_encoding == 'auto': if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE: encoding = 'opreturn' else: encoding = 'p2sh' if not old_style_api and util.enabled( 'p2sh_encoding' ) else 'multisig' # p2sh is not possible with old_style_api elif desired_encoding == 'p2sh' and not util.enabled('p2sh_encoding'): raise exceptions.TransactionError('P2SH encoding not enabled yet') elif encoding not in ('pubkeyhash', 'multisig', 'opreturn', 'p2sh'): raise exceptions.TransactionError('Unknown encoding‐scheme.') else: # no data encoding = None '''Destinations''' # Destination outputs. # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the # destination output isn’t a dust output. Set null values to dust size. destination_outputs_new = [] if encoding != 'p2sh': for (address, value) in destination_outputs: # Value. if script.is_multisig(address): dust_size = multisig_dust_size else: dust_size = regular_dust_size if value == None: value = dust_size elif value < dust_size: raise exceptions.TransactionError( 'Destination output is dust.') # Address. 
script.validate(address) if script.is_multisig(address): destination_outputs_new.append( (backend.multisig_pubkeyhashes_to_pubkeys( address, provided_pubkeys), value)) else: destination_outputs_new.append((address, value)) destination_outputs = destination_outputs_new destination_btc_out = sum( [value for address, value in destination_outputs]) '''Data''' if data: # @TODO: p2sh encoding require signable dust key if encoding == 'multisig': # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node # the default for the node is optional so could fail if (source_is_p2sh and dust_return_pubkey is None) or ( dust_return_pubkey is False and config.P2SH_DUST_RETURN_PUBKEY is None): raise exceptions.TransactionError( "Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided." ) elif dust_return_pubkey is False: dust_return_pubkey = binascii.unhexlify( config.P2SH_DUST_RETURN_PUBKEY) # Divide data into chunks. if encoding == 'pubkeyhash': # Prefix is also a suffix here. chunk_size = 20 - 1 - 8 elif encoding == 'multisig': # Two pubkeys, minus length byte, minus prefix, minus two nonces, # minus two sign bytes. chunk_size = (33 * 2) - 1 - 8 - 2 - 2 elif encoding == 'p2sh': chunk_size = p2sh_encoding.maximum_data_chunk_size() elif encoding == 'opreturn': chunk_size = config.OP_RETURN_MAX_SIZE if len(data) + len(config.PREFIX) > chunk_size: raise exceptions.TransactionError( 'One `OP_RETURN` output per transaction.') data_array = list(chunks(data, chunk_size)) # Data outputs. if encoding == 'multisig': data_value = multisig_dust_size elif encoding == 'p2sh': data_value = 0 # this will be calculated later elif encoding == 'opreturn': data_value = op_return_value else: # Pay‐to‐PubKeyHash, e.g. data_value = regular_dust_size data_output = (data_array, data_value) if not dust_return_pubkey: if encoding == 'multisig' or encoding == 'p2sh' and not source_is_p2sh: dust_return_pubkey = get_dust_return_pubkey( source, provided_pubkeys, encoding) else: dust_return_pubkey = None else: data_value = 0 data_array = [] data_output = None dust_return_pubkey = None data_btc_out = data_value * len(data_array) logger.getChild('p2shdebug').debug( 'data_btc_out=%s (data_value=%d len(data_array)=%d)' % (data_btc_out, data_value, len(data_array))) '''Inputs''' btc_in = 0 final_fee = 0 # Calculate collective size of outputs, for fee calculation. p2pkhsize = 25 + 9 if encoding == 'multisig': data_output_size = 81 # 71 for the data elif encoding == 'opreturn': # prefix + data + 10 bytes script overhead data_output_size = len(config.PREFIX) + 10 if data is not None: data_output_size = data_output_size + len(data) else: data_output_size = p2pkhsize # Pay‐to‐PubKeyHash (25 for the data?) 
outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) * data_output_size) if encoding == 'p2sh': # calculate all the p2sh outputs size_for_fee, datatx_necessary_fee, data_value, data_btc_out = p2sh_encoding.calculate_outputs( destination_outputs, data_array, fee_per_kb) # replace the data value data_output = (data_array, data_value) else: sum_data_output_size = len(data_array) * data_output_size size_for_fee = ( (25 + 9) * len(destination_outputs)) + sum_data_output_size if not (encoding == 'p2sh' and p2sh_pretx_txid): inputs, change_quantity, n_btc_in, n_final_fee = construct_coin_selection( encoding, data_array, source, allow_unconfirmed_inputs, unspent_tx_hash, custom_inputs, fee_per_kb, estimate_fee_per_kb, estimate_fee_per_kb_nblocks, exact_fee, size_for_fee, fee_provided, destination_btc_out, data_btc_out, regular_dust_size, disable_utxo_locks) btc_in = n_btc_in final_fee = n_final_fee else: # when encoding is P2SH and the pretx txid is passed we can skip coinselection inputs, change_quantity = None, None '''Finish''' if change_quantity: change_output = (source_address, change_quantity) else: change_output = None unsigned_pretx_hex = None unsigned_tx_hex = None pretx_txid = None if encoding == 'p2sh': assert not (segwit and p2sh_pretx_txid ) # shouldn't do old style with segwit enabled if p2sh_pretx_txid: pretx_txid = p2sh_pretx_txid if isinstance( p2sh_pretx_txid, bytes) else binascii.unhexlify(p2sh_pretx_txid) unsigned_pretx = None else: destination_value_sum = sum( [value for (destination, value) in destination_outputs]) source_value = destination_value_sum if change_output: # add the difference between source and destination to the change change_value = change_output[1] + (destination_value_sum - source_value) change_output = (change_output[0], change_value) unsigned_pretx = serializer.serialise_p2sh_pretx( inputs, source=source_address, source_value=source_value, data_output=data_output, change_output=change_output, pubkey=dust_return_pubkey, multisig_pubkeys=p2sh_source_multisig_pubkeys, multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required ) unsigned_pretx_hex = binascii.hexlify(unsigned_pretx).decode( 'utf-8') # with segwit we already know the txid and can return both if segwit: #pretx_txid = hashlib.sha256(unsigned_pretx).digest() # this should be segwit txid ptx = CTransaction.stream_deserialize( io.BytesIO(unsigned_pretx)) # could be a non-segwit tx anyways txid_ba = bytearray(ptx.GetTxid()) txid_ba.reverse() pretx_txid = bytes( txid_ba) # gonna leave the malleability problem to upstream logger.getChild('p2shdebug').debug('pretx_txid %s' % pretx_txid) print('pretx txid:', binascii.hexlify(pretx_txid)) if unsigned_pretx: # we set a long lock on this, don't want other TXs to spend from it UTXO_P2SH_ENCODING_LOCKS[make_outkey_vin(unsigned_pretx, 0)] = True # only generate the data TX if we have the pretx txId if pretx_txid: source_input = None if script.is_p2sh(source): source_input = select_any_coin_from_source(source) if not source_input: raise exceptions.TransactionError( 'Unable to select source input for p2sh source address' ) unsigned_datatx = serializer.serialise_p2sh_datatx( pretx_txid, source=source_address, source_input=source_input, destination_outputs=destination_outputs, data_output=data_output, pubkey=dust_return_pubkey, multisig_pubkeys=p2sh_source_multisig_pubkeys, multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required ) unsigned_datatx_hex = binascii.hexlify(unsigned_datatx).decode( 'utf-8') # let the rest of the code 
work it's magic on the data tx unsigned_tx_hex = unsigned_datatx_hex else: # we're just gonna return the pretx, it doesn't require any of the further checks logger.warn('old_style_api = %s' % old_style_api) return return_result([unsigned_pretx_hex], old_style_api=old_style_api) else: # Serialise inputs and outputs. unsigned_tx = serializer.serialise( encoding, inputs, destination_outputs, data_output, change_output, dust_return_pubkey=dust_return_pubkey) unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8') '''Sanity Check''' # Desired transaction info. (desired_source, desired_destination_outputs, desired_data) = tx_info desired_source = script.make_canonical(desired_source) desired_destination = script.make_canonical( desired_destination_outputs[0] [0]) if desired_destination_outputs else '' # NOTE: Include change in destinations for BTC transactions. # if change_output and not desired_data and desired_destination != config.UNSPENDABLE: # if desired_destination == '': # desired_destination = desired_source # else: # desired_destination += '-{}'.format(desired_source) # NOTE if desired_data == None: desired_data = b'' # Parsed transaction info. try: if pretx_txid and unsigned_pretx: backend.cache_pretx(pretx_txid, unsigned_pretx) parsed_source, parsed_destination, x, y, parsed_data, extra = blocks._get_tx_info( unsigned_tx_hex, p2sh_is_segwit=script.is_bech32(desired_source)) if encoding == 'p2sh': # make_canonical can't determine the address, so we blindly change the desired to the parsed desired_source = parsed_source if pretx_txid and unsigned_pretx: backend.clear_pretx(pretx_txid) except exceptions.BTCOnlyError: # Skip BTC‐only transactions. if extended_tx_info: return { 'btc_in': btc_in, 'btc_out': destination_btc_out + data_btc_out, 'btc_change': change_quantity, 'btc_fee': final_fee, 'tx_hex': unsigned_tx_hex, } logger.getChild('p2shdebug').debug('BTC-ONLY') return return_result([unsigned_pretx_hex, unsigned_tx_hex], old_style_api=old_style_api) desired_source = script.make_canonical(desired_source) # Check desired info against parsed info. desired = (desired_source, desired_destination, desired_data) parsed = (parsed_source, parsed_destination, parsed_data) if desired != parsed: # Unlock (revert) UTXO locks if UTXO_LOCKS is not None and inputs: for input in inputs: UTXO_LOCKS[source].pop(make_outkey(input), None) raise exceptions.TransactionError( 'Constructed transaction does not parse correctly: {} ≠ {}'.format( desired, parsed)) if extended_tx_info: return { 'btc_in': btc_in, 'btc_out': destination_btc_out + data_btc_out, 'btc_change': change_quantity, 'btc_fee': final_fee, 'tx_hex': unsigned_tx_hex, } return return_result([unsigned_pretx_hex, unsigned_tx_hex], old_style_api=old_style_api)
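# When called with extended_tx_info=True, the p2sh-aware construct() above
# returns a dict rather than a hex string, with the keys shown in its body
# ('btc_in', 'btc_out', 'btc_change', 'btc_fee', 'tx_hex'). A small sketch of
# consuming that result and checking it balances; the construct() call itself
# is elided because it needs a live db/backend, and the numbers are made up:
def check_balanced(result):
    """Sanity-check a construct(..., extended_tx_info=True) result dict."""
    spent = result['btc_out'] + result['btc_change'] + result['btc_fee']
    return result['btc_in'] == spent

assert check_balanced({'btc_in': 100000, 'btc_out': 5430,
                       'btc_change': 84570, 'btc_fee': 10000,
                       'tx_hex': '...'})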
def test_p2sh_encoding_manual_multisig_transaction(server_db): source = P2SH_ADDR[0] destination = ADDR[1] with util_test.ConfigContext( OLD_STYLE_API=True), util_test.MockProtocolChangesContext( enhanced_sends=True, p2sh_encoding=True): p2sh_source_multisig_pubkeys_binary = [ binascii.unhexlify(p) for p in [ DP['pubkey'][ADDR[0]], DP['pubkey'][ADDR[1]], DP['pubkey'][ ADDR[2]] ] ] scriptSig, redeemScript, outputScript = p2sh_encoding.make_p2sh_encoding_redeemscript( b'deadbeef01', n=0, pubKey=None, multisig_pubkeys=p2sh_source_multisig_pubkeys_binary, multisig_pubkeys_required=2) redeemScript = bitcoinlib.core.script.CScript(redeemScript) assert repr( redeemScript ) == "CScript([OP_DROP, 2, x('{}'), x('{}'), x('{}'), 3, OP_CHECKMULTISIGVERIFY, 0, OP_DROP, OP_DEPTH, 0, OP_EQUAL])".format( DP['pubkey'][ADDR[0]], DP['pubkey'][ADDR[1]], DP['pubkey'][ADDR[2]]) # setup transaction fee = 20000 fee_per_kb = 50000 pretxhex = api.compose_transaction( server_db, 'send', { 'source': source, 'destination': destination, 'asset': 'XCP', 'quantity': 100, }, p2sh_source_multisig_pubkeys=[ DP['pubkey'][ADDR[0]], DP['pubkey'][ADDR[1]], DP['pubkey'][ADDR[2]] ], p2sh_source_multisig_pubkeys_required=2, encoding='p2sh', fee_per_kb=fee_per_kb, fee=fee) # debugTransaction = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(pretxhex)) # store transaction pretxid, _ = util_test.insert_raw_transaction(pretxhex, server_db) logger.debug('pretxid %s' % (pretxid)) # now compose the data transaction result = api.compose_transaction( server_db, 'send', { 'source': source, 'destination': destination, 'asset': 'XCP', 'quantity': 100 }, p2sh_source_multisig_pubkeys=[ DP['pubkey'][ADDR[0]], DP['pubkey'][ADDR[1]], DP['pubkey'][ADDR[2]] ], p2sh_source_multisig_pubkeys_required=2, p2sh_pretx_txid=pretxid, # pass the pretxid encoding='p2sh', fee_per_kb=fee_per_kb) assert not isinstance(result, list) datatxhex = result datatx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(datatxhex)) # parse the transaction parsed_source, parsed_destination, parsed_btc_amount, parsed_fee, parsed_data, extra = blocks._get_tx_info( datatxhex) assert parsed_source == source assert parsed_data == binascii.unhexlify( "00000002" "0000000000000001" "0000000000000064" "6f8d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec" ) # ID=enhanced_send(0x02) ASSET=XCP(0x01) VALUE=100(0x64) destination_pubkey(0x6f8d...d6ec) assert parsed_btc_amount == 0
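# The parsed_data assertion above is the raw enhanced-send payload: a 4-byte
# message id, an 8-byte asset id, an 8-byte quantity and the packed destination.
# A small sketch that unpacks those fixed-width fields; the field layout is
# taken from the assertion's own comment, this is not the library's unpack
# routine:
import binascii
import struct

payload = binascii.unhexlify(
    "00000002" "0000000000000001" "0000000000000064"
    "6f8d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec")
message_id, asset_id, quantity = struct.unpack('>IQQ', payload[:20])
packed_destination = payload[20:]

assert message_id == 2           # enhanced send
assert asset_id == 1             # XCP
assert quantity == 100
assert len(packed_destination) == 21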
def test_p2sh_encoding(server_db): source = ADDR[0] destination = ADDR[1] with util_test.ConfigContext( OLD_STYLE_API=True), util_test.MockProtocolChangesContext( enhanced_sends=True, p2sh_encoding=True): utxos = dict(((utxo['txid'], utxo['vout']), utxo) for utxo in backend.get_unspent_txouts(source)) # pprint.pprint(utxos) fee = 20000 fee_per_kb = 50000 result = api.compose_transaction(server_db, 'send', { 'source': source, 'destination': destination, 'asset': 'XCP', 'quantity': 100 }, encoding='p2sh', fee_per_kb=fee_per_kb, fee=fee) assert not isinstance(result, list) pretxhex = result pretx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(pretxhex)) sumvin = sum([ int(utxos[(bitcoinlib.core.b2lx( vin.prevout.hash), vin.prevout.n)]['amount'] * 1e8) for vin in pretx.vin ]) sumvout = sum([vout.nValue for vout in pretx.vout]) assert len(pretx.vout) == 2 assert len(pretxhex) / 2 == 142 assert sumvin == 199909140 assert sumvout < sumvin assert sumvout == (sumvin - fee) # data P2SH output expected_datatx_length = 435 expected_datatx_fee = int(expected_datatx_length / 1000 * fee_per_kb) assert repr( pretx.vout[0].scriptPubKey ) == "CScript([OP_HASH160, x('7698101f9b9e5cdf0a0e11c2972dbc4860f374bf'), OP_EQUAL])" assert pretx.vout[0].nValue == expected_datatx_fee # change output assert pretx.vout[1].nValue == sumvin - expected_datatx_fee - fee assert pretxhex == "0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02f65400000000000017a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87febbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000" # 01000000 | version # 01 | inputs # c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae | txout hash # 00000000 | txout index # 19 | script length # 76a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac | tx_script # ffffffff | Sequence # 02 | number of outputs # f654000000000000 | output 1 value (21750) # 17 | output 1 length (23 bytes) # a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87 | output 1 script # febbe90b00000000 | output 2 value (199867390) # 19 | output 2 length (25 bytes) # 76a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac | output 2 script # 00000000 | locktime # first transaction should be considered BTC only with pytest.raises(exceptions.BTCOnlyError): blocks._get_tx_info(pretxhex) # store transaction pretxid, _ = util_test.insert_raw_transaction(pretxhex, server_db) logger.debug('pretxid %s' % (pretxid)) # check that when we do another, unrelated, send that it won't use our UTXO result = api.compose_transaction( server_db, 'send', { 'source': source, 'destination': destination, 'asset': 'XCP', 'quantity': 100 }) othertx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(result)) othertxid = bitcoinlib.core.lx( bitcoinlib.core.b2x(othertx.vin[0].prevout.hash)) # reverse hash assert not (binascii.hexlify(othertxid).decode('ascii') == pretxid and othertx.vin[0].prevout.n == 0) # now compose the data transaction result = api.compose_transaction( server_db, 'send', { 'source': source, 'destination': destination, 'asset': 'XCP', 'quantity': 100 }, p2sh_pretx_txid=pretxid, # pass the pretxid encoding='p2sh', fee_per_kb=fee_per_kb) assert not isinstance(result, list) datatxhex = result datatx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(datatxhex)) sumvin = sum( [pretx.vout[n].nValue for n, vin in enumerate(datatx.vin)]) sumvout = sum([vout.nValue for vout in datatx.vout]) fee = 10000 assert 
len(datatxhex) / 2 == 190 assert sumvin == expected_datatx_fee assert sumvout < sumvin assert sumvout == sumvin - expected_datatx_fee assert len(datatx.vout) == 1 # opreturn signalling P2SH assert repr( datatx.vout[0].scriptPubKey ) == "CScript([OP_RETURN, x('8a5dda15fb6f0562da344d2f')])" # arc4(PREFIX + 'P2SH') assert datatx.vout[0].nValue == 0 assert datatxhex == "01000000010a0746fe9308ac6e753fb85780a8b788b40655148dcde1435f2048783b784f06000000007431544553545858585800000002000000000000000100000000000000646f8d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec2975210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0ad007574008717a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87ffffffff0100000000000000000e6a0c8a5dda15fb6f0562da344d2f00000000" # 01000000 | version # 01 | inputs # 0a0746fe9308ac6e753fb85780a8b788b40655148dcde1435f2048783b784f06 | txout hash # 00000000 | txout index (0) # 74 | script length (116) # 31544553545858585800000002000000000000000100000000000000 | tx_script # 646f8d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec2975210282b886c087eb37dc8182f14ba6cc | ... # 3e9485ed618b95804d44aecc17c300b585b0ad007574008717a9147698101f9b9e5cdf0a0e11c297 | ... # 2dbc4860f374bf87 | ... # ffffffff | Sequence # 01 | number of outputs # 0000000000000000 | output 1 value (0) # 0e | output 1 length (14 bytes) # 6a0c8a5dda15fb6f0562da344d2f | output 1 script # 00000000 | locktime # verify parsed result parsed_source, parsed_destination, parsed_btc_amount, parsed_fee, parsed_data, extra = blocks._get_tx_info( datatxhex) assert parsed_source == source assert parsed_data == binascii.unhexlify( "00000002" "0000000000000001" "0000000000000064" "6f8d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec" ) # ID=enhanced_send(0x02) ASSET=XCP(0x01) VALUE=100(0x64) destination_pubkey(0x6f8d...d6ec) assert parsed_btc_amount == 0 assert parsed_fee == expected_datatx_fee # check signing pubkey tx_script_start = 8 + 2 + 64 + 8 tx_script_length = int(datatxhex[tx_script_start:tx_script_start + 2], 16) * 2 tx_script = datatxhex[tx_script_start + 2:tx_script_start + 2 + tx_script_length] signing_pubkey_hash = tx_script[-44:-4] address = script.base58_check_encode(signing_pubkey_hash, config.ADDRESSVERSION)
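# The expected_datatx_fee used in the test above follows directly from the
# assumed data-tx size: int(435 / 1000 * 50000) = 21750 satoshis, which is the
# 21750 value the annotated pretx hex shows for output 1
# (f654000000000000, little-endian). A quick self-contained check:
expected_datatx_fee = int(435 / 1000 * 50000)
assert expected_datatx_fee == 21750
assert (21750).to_bytes(8, 'little').hex() == 'f654000000000000'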
def test_p2sh_encoding_long_data(server_db): source = ADDR[0] destination = ADDR[1] with util_test.ConfigContext( OLD_STYLE_API=True), util_test.MockProtocolChangesContext( enhanced_sends=True, p2sh_encoding=True): utxos = dict(((utxo['txid'], utxo['vout']), utxo) for utxo in backend.get_unspent_txouts(source)) # pprint.pprint(utxos) fee_per_kb = 50000 result = api.compose_transaction( server_db, 'broadcast', { 'source': source, 'text': 'The quick brown fox jumped over the lazy dog. ' * 12, 'fee_fraction': 0, 'timestamp': 1512155862, 'value': 0, }, encoding='p2sh', fee_per_kb=fee_per_kb) assert not isinstance(result, list) pretxhex = result pretx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(pretxhex)) actual_fee = int(len(pretxhex) / 2 * fee_per_kb / 1000) sumvin = sum([ int(utxos[(bitcoinlib.core.b2lx( vin.prevout.hash), vin.prevout.n)]['amount'] * 1e8) for vin in pretx.vin ]) sumvout = sum([vout.nValue for vout in pretx.vout]) pretx_fee = 12950 assert len(pretx.vout) == 3 assert len(pretxhex) / 2 == 174 assert sumvin == 199909140 assert sumvout < sumvin assert sumvout == (sumvin - pretx_fee) # data P2SH output expected_datatx_length = 1156 expected_datatx_fee = int(expected_datatx_length / 1000 * fee_per_kb) expected_datatx_fee_rounded = int(math.ceil( expected_datatx_fee / 2)) * 2 assert repr( pretx.vout[0].scriptPubKey ) == "CScript([OP_HASH160, x('7698101f9b9e5cdf0a0e11c2972dbc4860f374bf'), OP_EQUAL])" assert pretx.vout[0].nValue == int(math.ceil(expected_datatx_fee / 2)) assert pretx.vout[1].nValue == int(math.ceil(expected_datatx_fee / 2)) # change output assert pretx.vout[ 2].nValue == sumvin - expected_datatx_fee_rounded - pretx_fee assert pretxhex == "0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03e47000000000000017a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87e47000000000000017a914676d587edf25cf01d3b153ff0b71f5e9b622386387b64ae90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000" # 00000001 | version # 01 | inputs # c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae | txout hash # 00000000 | txout index # 19 | script length # 76a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac | tx_script # ffffffff | Sequence # 03 | number of outputs (3) # e470000000000000 | output 1 value (28900) # 17 | output 1 length (23 bytes) # a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87 | output 1 script # e470000000000000 | output 2 value (28900) # 17 | output 2 length (23 bytes) # a914676d587edf25cf01d3b153ff0b71f5e9b622386387 | output 2 script # b64ae90b00000000 | output 3 value (199838390) # 19 | output 3 length (25 bytes) # 76a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac | output 3 script # 00000000 | locktime # store transaction pretxid, _ = util_test.insert_raw_transaction(pretxhex, server_db) logger.debug('pretxid %s' % (pretxid)) # now compose the data transaction result = api.compose_transaction( server_db, 'broadcast', { 'source': source, 'text': 'The quick brown fox jumped over the lazy dog. 
' * 12, 'fee_fraction': 0, 'timestamp': 1512155862, 'value': 0, }, p2sh_pretx_txid=pretxid, # pass the pretxid encoding='p2sh', fee_per_kb=fee_per_kb) assert not isinstance(result, list) datatxhex = result datatx = bitcoinlib.core.CTransaction.deserialize( binascii.unhexlify(datatxhex)) sumvin = sum( [pretx.vout[n].nValue for n, vin in enumerate(datatx.vin)]) sumvout = sum([vout.nValue for vout in datatx.vout]) assert len(datatx.vin) == 2 assert len(datatxhex) / 2 == 1682 / 2 assert sumvin == expected_datatx_fee_rounded assert sumvout < sumvin assert sumvout == sumvin - expected_datatx_fee_rounded assert len(datatx.vout) == 1 # opreturn signalling P2SH assert repr( datatx.vout[0].scriptPubKey ) == "CScript([OP_RETURN, x('8a5dda15fb6f0562da344d2f')])" # arc4(PREFIX + 'P2SH') assert datatx.vout[0].nValue == 0 assert datatxhex == "0100000002f33f677de4180f1b0c261a991974c57de97f082a7e62332b77ec5d193d13d1a300000000fd4d024d080254455354585858580000001e5a21aad600000000000000000000000054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f766572202975210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0ad007574008717a9147698101f9b9e5cdf0a0e11c2972dbc4860f374bf87fffffffff33f677de4180f1b0c261a991974c57de97f082a7e62332b77ec5d193d13d1a30100000087445445535458585858746865206c617a7920646f672e2054686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e202975210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0ad517574008717a914676d587edf25cf01d3b153ff0b71f5e9b622386387ffffffff0100000000000000000e6a0c8a5dda15fb6f0562da344d2f00000000" # 01000000 | version # 02 | inputs # f33f677de4180f1b0c261a991974c57de97f082a7e62332b77ec5d193d13d1a3 | txout hash # 00000000 | txout index (0) # fd | script length (253) # 4d024d080254455354585858580000001e5a21aad6000000000000000000000000 | tx_script # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... 
# 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e20 | ... # 54686520717569636b2062726f776e20666f78206a756d706564206f766572202975210282b886c087eb37dc8182 | ... # f14ba6cc3e9485ed618b95804d44aecc17c300b585b0ad007574008717a9147698101f9b9e5cdf0a0e11c2972dbc | ... # 4860f374bf87 | ... # ffffffff | Sequence # f33f677de4180f1b0c261a991974c57de97f082a7e62332b77ec5d193d13d1a3 | txout hash # 01000000 | txout index (1) # 87 | script length (135) # 445445535458585858746865206c617a7920646f672e20 | tx_script # 54686520717569636b2062726f776e20666f78206a756d706564206f76657220746865206c617a7920646f672e202975 | ... # 210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0ad517574008717a914676d587edf | ... # 25cf01d3b153ff0b71f5e9b622386387 | ... # ffffffff | Sequence # 01 | number of outputs # 0000000000000000 | output 1 value (0) # 0e | output 1 length (14 bytes) # 6a0c8a5dda15fb6f0562da344d2f | output 1 script # 00000000 | locktime # verify parsed result parsed_source, parsed_destination, parsed_btc_amount, parsed_fee, parsed_data, extra = blocks._get_tx_info( datatxhex) assert parsed_source == source assert parsed_data == binascii.unhexlify( "0000001e5a21aad6000000000000000000000000" ) + b'The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. The quick brown fox jumped over the lazy dog. ' # ID=enhanced_send(0x1e) ASSET=XCP(0x01) VALUE=100(0x64) destination_pubkey(0x6f8d...d6ec) assert parsed_btc_amount == 0 assert parsed_fee == expected_datatx_fee_rounded
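# The long-data variant above needs two P2SH data outputs, so the pretx splits
# the expected data-tx fee across them: int(1156 / 1000 * 50000) = 57800
# satoshis, i.e. ceil(57800 / 2) = 28900 per output, matching the 28900 values
# (e470000000000000, little-endian) in the annotated pretx hex. A quick check:
import math

expected_datatx_fee = int(1156 / 1000 * 50000)
per_output = int(math.ceil(expected_datatx_fee / 2))
assert expected_datatx_fee == 57800
assert per_output == 28900
assert (28900).to_bytes(8, 'little').hex() == 'e470000000000000'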
def test_p2sh_encoding_composed(server_db): source = ADDR[0] destination = ADDR[1] with util_test.ConfigContext( DISABLE_ARC4_MOCKING=True, OLD_STYLE_API=True), util_test.MockProtocolChangesContext( enhanced_sends=True, p2sh_encoding=True): # BTC Mainnet tx d90dc8637fd2ab9ae39b7c2929c793c5d28d7dea672afb02fb4001637085e9a1 datatxhex = "010000000102d2b137e49e930ef3e436b342713d8d07bd378e773c915a5938993d81dc7e6000000000fdab0147304402207848293e88563750f647e949cb594cdbec0beb4070faac73040d77d479420f8302201e0ac32788e98bd984279102b7382576d7ddb4b125d1d507725cbd12d97a2908014d60014d1401434e5452505254590300010042276049e5518791be2ffe2c301f5dfe9ef85dd0400001720034b0410000000000000001500000006a79811e000000000000000054000079cec1665f4800000000000000050000000ca91f2d660000000000000005402736c8de6e34d54000000000000001500c5e4c71e081ceb00000000000000054000000045dc03ec4000000000000000500004af1271cf5fc00000000000000054001e71f8464432780000000000000015000002e1e4191f0d0000000000000005400012bc4aaac2a54000000000000001500079c7e774e411c00000000000000054000000045dc0a6f00000000000000015000002e1e486f661000000000000000540001c807abe13908000000000000000475410426156245525daa71f2e84a40797bcf28099a2c508662a8a33324a703597b9aa2661a79a82ffb4caaa9b15f4094622fbfa85f8b9dc7381f991f5a265421391cc3ad0075740087ffffffff0100000000000000000e6a0c31d52bf3b404aefaf596cfd000000000" config.PREFIX = b'CNTRPRTY' parsed_source, parsed_destination, parsed_btc_amount, parsed_fee, parsed_data, extra = blocks._get_tx_info( datatxhex) print('!!!!!!!!!!!!!!!!>1') print(parsed_source) print(parsed_destination) print(parsed_btc_amount) print(parsed_fee) print(parsed_data) print(extra) print('!!!!!!!!!!!!!!!!<1')
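# Note on the config.PREFIX override in the test above: the hard-coded
# transaction is a real Bitcoin mainnet transaction, so its embedded payload is
# tagged with the mainnet prefix b'CNTRPRTY' rather than the prefix the test
# fixtures normally use, which is why the test switches config.PREFIX before
# calling _get_tx_info(). A minimal sketch of restoring the original prefix
# afterwards so the override does not leak into other tests (hypothetical
# cleanup, not taken from the source):
saved_prefix = config.PREFIX
try:
    config.PREFIX = b'CNTRPRTY'
    # ... parse the mainnet transaction, as in the test above ...
finally:
    config.PREFIX = saved_prefix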