def validate(address, allow_p2sh=True):
    """Make sure the address is valid.

    May throw `AddressError`.
    """
    # Get array of pubkeyhashes to check.
    if is_multisig(address):
        pubkeyhashes = pubkeyhash_array(address)
    else:
        pubkeyhashes = [address]

    # Check validity by attempting to decode.
    for pubkeyhash in pubkeyhashes:
        try:
            if util.enabled('segwit_support'):
                if not is_bech32(pubkeyhash):
                    base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
            else:
                base58_check_decode(pubkeyhash, config.ADDRESSVERSION)
        except VersionByteError as e:
            if not allow_p2sh:
                raise e
            base58_check_decode(pubkeyhash, config.P2SH_ADDRESSVERSION)
        except Base58Error as e:
            if not util.enabled('segwit_support') or not is_bech32(pubkeyhash):
                raise e
def is_vendable(db, asset):
    if asset == config.XCP:
        return True  # Always vendable.

    asset = util.resolve_subasset_longname(db, asset)
    cursor = db.cursor()
    issuances = list(cursor.execute('''SELECT vendable, reassignable, listed FROM issuances \
                                       WHERE (status = ? AND asset = ?)
                                       ORDER BY tx_index DESC LIMIT 1''', ('valid', asset)))

    if (len(issuances) <= 0):
        return False

    vendable = issuances[0]['vendable']  # Use the last issuance.
    reassignable = issuances[0]['reassignable']
    listed = issuances[0]['listed']

    if not util.enabled('dispensers'):
        return False
    elif not util.enabled('enable_vendable_fix') and (reassignable == False or listed == False):
        return False
    else:
        return vendable
def validate (db, source, give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, block_index):
    problems = []
    cursor = db.cursor()

    # For SQLite3
    if give_quantity > config.MAX_INT or get_quantity > config.MAX_INT or fee_required > config.MAX_INT or block_index + expiration > config.MAX_INT:
        problems.append('integer overflow')

    if give_asset == config.BTC and get_asset == config.BTC:
        problems.append('cannot trade {} for itself'.format(config.BTC))

    if not isinstance(give_quantity, int):
        problems.append('give_quantity must be in satoshis')
        return problems
    if not isinstance(get_quantity, int):
        problems.append('get_quantity must be in satoshis')
        return problems
    if not isinstance(fee_required, int):
        problems.append('fee_required must be in satoshis')
        return problems
    if not isinstance(expiration, int):
        problems.append('expiration must be expressed as an integer block delta')
        return problems

    if give_quantity <= 0:
        problems.append('non‐positive give quantity')
    if get_quantity <= 0:
        problems.append('non‐positive get quantity')
    if fee_required < 0:
        problems.append('negative fee_required')
    if expiration < 0:
        problems.append('negative expiration')
    if expiration == 0 and not (block_index >= 317500 or config.TESTNET or config.REGTEST):   # Protocol change.
        problems.append('zero expiration')

    if not give_quantity or not get_quantity:
        problems.append('zero give or zero get')

    cursor.execute('SELECT * FROM issuances WHERE (status = ? AND asset = ?) ORDER BY tx_index DESC LIMIT 1', ('valid', give_asset))
    valid_give_asset = cursor.fetchone()
    if give_asset not in (config.BTC, config.XCP):
        if not valid_give_asset:
            problems.append('no such asset to give ({})'.format(give_asset))
        elif util.enabled('delisted_assets', block_index=block_index) and valid_give_asset['listed'] == 0:
            problems.append('Delisted asset ({})'.format(give_asset))
        elif util.enabled('non_reassignable_assets', block_index=block_index):
            if valid_give_asset['reassignable'] == 0 and valid_give_asset['issuer'] != source:
                problems.append('non reassignable asset to give ({})'.format(give_asset))

    # Don't check if `get_asset` is reassignable here. The issuer of the non-reassignable asset can be a maker.
    cursor.execute('SELECT * FROM issuances WHERE (status = ? AND asset = ?) ORDER BY tx_index DESC LIMIT 1', ('valid', get_asset))
    valid_get_asset = cursor.fetchone()
    if get_asset not in (config.BTC, config.XCP):
        if not valid_get_asset:
            problems.append('no such asset to get ({})'.format(get_asset))
        elif util.enabled('delisted_assets', block_index=block_index) and valid_get_asset['listed'] == 0:
            problems.append('Delisted asset ({})'.format(get_asset))

    if expiration > config.MAX_EXPIRATION:
        problems.append('expiration overflow')

    return problems
def test_mock_protocol_changes(cp_server):
    assert util.enabled('multisig_addresses') == True

    with util_test.MockProtocolChangesContext(multisig_addresses=False):
        assert util.enabled('multisig_addresses') == False

        with util_test.MockProtocolChangesContext(multisig_addresses=None):
            assert util.enabled('multisig_addresses') == None

        assert util.enabled('multisig_addresses') == False

    assert util.enabled('multisig_addresses') == True
def validate(db, source, timestamp, value, fee_fraction_int, text, block_index):
    problems = []

    # For SQLite3
    if timestamp > config.MAX_INT or value > config.MAX_INT or fee_fraction_int > config.MAX_INT:
        problems.append('integer overflow')

    if util.enabled('max_fee_fraction'):
        if fee_fraction_int >= config.UNIT:
            problems.append('fee fraction greater than or equal to 1')
    else:
        if fee_fraction_int > 4294967295:
            problems.append('fee fraction greater than 42.94967295')

    if timestamp < 0:
        problems.append('negative timestamp')

    if not source:
        problems.append('null source address')

    # Check previous broadcast in this feed.
    cursor = db.cursor()
    broadcasts = list(cursor.execute('''SELECT * FROM broadcasts \
                                        WHERE (status = ? AND source = ?)
                                        ORDER BY tx_index ASC''', ('valid', source)))
    cursor.close()
    if broadcasts:
        last_broadcast = broadcasts[-1]
        if last_broadcast['locked']:
            problems.append('locked feed')
        elif timestamp <= last_broadcast['timestamp']:
            problems.append('feed timestamps not monotonically increasing')

    if not (block_index >= 317500 or config.TESTNET):   # Protocol change.
        if len(text) > 52:
            problems.append('text too long')

    if util.enabled('options_require_memo') and text and text.lower().startswith('options'):
        ops_spl = text.split(" ")
        if len(ops_spl) == 2:
            try:
                options_int = int(ops_spl.pop())

                if (options_int > config.MAX_INT) or (options_int < 0):
                    problems.append('integer overflow')
                elif options_int > config.ADDRESS_OPTION_MAX_VALUE:
                    problems.append('options out of range')
            except:
                problems.append('options not an integer')

    return problems
def validate(db, source, timestamp, value, fee_fraction_int, text, block_index):
    problems = []

    # For SQLite3
    if timestamp > config.MAX_INT or value > config.MAX_INT or fee_fraction_int > config.MAX_INT:
        problems.append('integer overflow')

    if util.enabled('max_fee_fraction'):
        if fee_fraction_int >= config.UNIT:
            problems.append('fee fraction greater than or equal to 1')
    else:
        if fee_fraction_int > 4294967295:
            problems.append('fee fraction greater than 42.94967295')

    if timestamp < 0:
        problems.append('negative timestamp')

    if not source:
        problems.append('null source address')

    # Check previous broadcast in this feed.
    cursor = db.cursor()
    broadcasts = list(cursor.execute('''SELECT * FROM broadcasts \
                                        WHERE (status = ? AND source = ?)
                                        ORDER BY tx_index ASC''', ('valid', source)))
    cursor.close()
    if broadcasts:
        last_broadcast = broadcasts[-1]
        if last_broadcast['locked']:
            problems.append('locked feed')
        elif timestamp <= last_broadcast['timestamp']:
            problems.append('feed timestamps not monotonically increasing')

    if not (block_index >= 317500 or config.TESTNET or config.REGTEST):   # Protocol change.
        if len(text) > 52:
            problems.append('text too long')

    if util.enabled('options_require_memo') and text and text.lower().startswith('options'):
        try:
            # Check for options and if they are valid.
            options = util.parse_options_from_string(text)
            if options is not False:
                util.validate_address_options(options)
        except exceptions.OptionsError as e:
            problems.append(str(e))

    return problems
def validate (db, source, destination, asset, quantity, memo_bytes, block_index):
    problems = []

    if asset == config.BTC:
        problems.append('cannot send {}'.format(config.BTC))

    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    if quantity == 0:
        problems.append('zero quantity')

    # For SQLite3
    if quantity > config.MAX_INT:
        problems.append('integer overflow')

    # destination is always required
    if not destination:
        problems.append('destination is required')

    # check memo
    if memo_bytes is not None and len(memo_bytes) > MAX_MEMO_LENGTH:
        problems.append('memo is too long')

    if util.enabled('options_require_memo'):
        cursor = db.cursor()
        results = cursor.execute('SELECT options FROM addresses WHERE address=?', (destination,))
        if results:
            result = results.fetchone()
            if result and util.active_options(result['options'], config.ADDRESS_OPTION_REQUIRE_MEMO):
                if memo_bytes is None or (len(memo_bytes) == 0):
                    problems.append('destination requires memo')

    if util.enabled('non_reassignable_assets') and asset != config.BTC and asset != config.XCP:
        cursor = db.cursor()
        # verify not sending non-reassignable asset
        issuances = list(cursor.execute('''SELECT * FROM issuances \
                                           WHERE asset = ? AND status = ?
                                           ORDER BY tx_index DESC LIMIT 1''', (asset, 'valid')))
        if not issuances:
            problems.append('issuance not found (system error?)')
        elif not issuances[0]['reassignable'] and issuances[0]['issuer'] != source and issuances[0]['issuer'] != destination:
            problems.append('non-reassignable asset')

    return problems
def validate (db, source, destination, flags, memo_bytes, block_index):
    problems = []

    if not util.enabled('sweep_send'):
        problems.append('not activated yet.')

    if source == destination:
        problems.append('destination cannot be the same as source')

    cursor = db.cursor()
    cursor.execute('''SELECT * FROM balances \
                      WHERE (address = ? AND asset = ?)''', (source, config.XCP))
    result = cursor.fetchall()

    if len(result) == 0:
        problems.append('insufficient XMP balance for sweep. Need %s XMP for antispam fee' % ANTISPAM_FEE_FLOAT)
    elif result[0]['quantity'] < ANTISPAM_FEE:
        problems.append('insufficient XMP balance for sweep. Need %s XMP for antispam fee' % ANTISPAM_FEE_FLOAT)

    if flags > FLAGS_ALL:
        problems.append('invalid flags %i' % flags)
    elif not(flags & (FLAG_BALANCES | FLAG_OWNERSHIP)):
        problems.append('must specify which kind of transfer in flags')

    if memo_bytes and len(memo_bytes) > MAX_MEMO_LENGTH:
        problems.append('memo too long')

    return problems
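# The flag test above is a plain bitmask check. A minimal sketch of how the
# constants could be laid out (illustrative values only — the real constants
# are defined elsewhere in this module):
#
#   FLAG_BALANCES = 0x0001     # sweep asset balances
#   FLAG_OWNERSHIP = 0x0002    # sweep asset ownerships
#   FLAG_BINARY_MEMO = 0x0004  # memo_bytes is raw binary, not text
#   FLAGS_ALL = FLAG_BINARY_MEMO | FLAG_BALANCES | FLAG_OWNERSHIP
#
# `flags & (FLAG_BALANCES | FLAG_OWNERSHIP)` is zero only when neither kind
# of transfer was requested, which is what triggers the 'must specify which
# kind of transfer in flags' problem.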
def validate (db, source, timestamp, value, fee_fraction_int, text, block_index):
    problems = []

    if util.enabled('max_fee_fraction'):
        if fee_fraction_int >= config.UNIT:
            problems.append('fee fraction greater than or equal to 1')
    else:
        if fee_fraction_int > 4294967295:
            problems.append('fee fraction greater than 42.94967295')

    if timestamp < 0:
        problems.append('negative timestamp')

    if not source:
        problems.append('null source address')

    # Check previous broadcast in this feed.
    cursor = db.cursor()
    broadcasts = list(cursor.execute('''SELECT * FROM broadcasts \
                                        WHERE (status = ? AND source = ?)
                                        ORDER BY tx_index ASC''', ('valid', source)))
    cursor.close()
    if broadcasts:
        last_broadcast = broadcasts[-1]
        if last_broadcast['locked']:
            problems.append('locked feed')
        elif timestamp <= last_broadcast['timestamp']:
            problems.append('feed timestamps not monotonically increasing')

    if not (block_index >= 317500 or config.TESTNET):   # Protocol change.
        if len(text) > 52:
            problems.append('text too long')

    return problems
def pack(message_type_id, block_index=None):
    # pack message ID into 1 byte if not zero
    if util.enabled('short_tx_type_id', block_index) and message_type_id > 0 and message_type_id < 256:
        return struct.pack(config.SHORT_TXTYPE_FORMAT, message_type_id)

    # pack into 4 bytes
    return struct.pack(config.TXTYPE_FORMAT, message_type_id)
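# Sketch of the two encodings pack() can produce, assuming the usual struct
# formats (SHORT_TXTYPE_FORMAT = 'B', TXTYPE_FORMAT = '>I'); the exact format
# strings live in config and may differ:
#
#   struct.pack('B', 2)   -> b'\x02'               # 1-byte short ID
#   struct.pack('>I', 2)  -> b'\x00\x00\x00\x02'   # 4-byte legacy ID
#
# IDs equal to 0 or >= 256 always fall through to the 4-byte form, so a
# leading zero byte unambiguously signals the legacy encoding to unpack().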
def make_pubkeyhash(address):
    """Create a new PubKeyHash."""
    if is_multisig(address):
        signatures_required, pubs, signatures_possible = extract_array(address)
        pubkeyhashes = []
        for pub in pubs:
            if is_pubkeyhash(pub):
                pubkeyhash = pub
            else:
                pubkeyhash = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(pub, 'utf-8')))
            pubkeyhashes.append(pubkeyhash)
        pubkeyhash_address = construct_array(signatures_required, pubkeyhashes, signatures_possible)
    else:
        if util.enabled('segwit_support') and is_bech32(address):
            pubkeyhash_address = address  # Some bech32 addresses are valid base58 data
        elif is_pubkeyhash(address):
            pubkeyhash_address = address
        elif is_p2sh(address):
            pubkeyhash_address = address
        else:
            pubkeyhash_address = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(address, 'utf-8')))
    return pubkeyhash_address
def compose(db, source, timestamp, value, fee_fraction, text):
    # Store the fee fraction as an integer.
    fee_fraction_int = int(fee_fraction * 1e8)

    problems = validate(db, source, timestamp, value, fee_fraction_int, text, util.CURRENT_BLOCK_INDEX)
    if problems:
        raise exceptions.ComposeError(problems)

    data = struct.pack(config.TXTYPE_FORMAT, ID)

    # always use custom length byte instead of problematic usage of 52p format and make sure to encode('utf-8') for length
    if util.enabled('broadcast_pack_text'):
        data += struct.pack(FORMAT, timestamp, value, fee_fraction_int)
        data += VarIntSerializer.serialize(len(text.encode('utf-8')))
        data += text.encode('utf-8')
    else:
        if len(text) <= 52:
            curr_format = FORMAT + '{}p'.format(len(text) + 1)
        else:
            curr_format = FORMAT + '{}s'.format(len(text))

        data += struct.pack(curr_format, timestamp, value, fee_fraction_int, text.encode('utf-8'))

    return (source, [], data)
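# Rough layout of the data built above when 'broadcast_pack_text' is enabled
# (field widths depend on the FORMAT constant defined elsewhere in this
# module, so treat the sizes as an assumption):
#
#   [message type ID][timestamp | value | fee_fraction_int][varint text length][utf-8 text]
#
# The varint length prefix is what replaces the old '{}p' Pascal-string
# packing, so broadcast texts longer than 52 bytes no longer need a separate
# struct format on the parsing side.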
def dispense(db, tx):
    cursor = db.cursor()

    cursor.execute('SELECT * FROM dispensers WHERE source=:source AND status=:status ORDER BY asset', {
        'source': tx['destination'],
        'status': STATUS_OPEN
    })
    dispensers = cursor.fetchall()

    dispense_index = 0
    for dispenser in dispensers:
        satoshirate = dispenser['satoshirate']
        give_quantity = dispenser['give_quantity']

        if satoshirate > 0 and give_quantity > 0:
            must_give = int(floor(tx['btc_amount'] / satoshirate))
            remaining = int(floor(dispenser['give_remaining'] / give_quantity))
            actually_given = min(must_give, remaining) * give_quantity
            give_remaining = dispenser['give_remaining'] - actually_given

            assert give_remaining >= 0

            # Skip dispense if quantity is 0
            if util.enabled('zero_quantity_value_adjustment_1') and actually_given==0:
                continue

            util.credit(db, tx['source'], dispenser['asset'], actually_given, action='dispense', event=tx['tx_hash'])

            dispenser['give_remaining'] = give_remaining
            if give_remaining < dispenser['give_quantity']:
                # close the dispenser
                dispenser['give_remaining'] = 0
                if give_remaining > 0:
                    # return the remaining to the owner
                    util.credit(db, dispenser['source'], dispenser['asset'], give_remaining, action='dispenser close', event=tx['tx_hash'])
                dispenser['status'] = STATUS_CLOSED

            dispenser['block_index'] = tx['block_index']
            dispenser['prev_status'] = STATUS_OPEN
            cursor.execute('UPDATE DISPENSERS SET give_remaining=:give_remaining, status=:status \
                WHERE source=:source AND asset=:asset AND satoshirate=:satoshirate AND give_quantity=:give_quantity AND status=:prev_status', dispenser)

            bindings = {
                'tx_index': tx['tx_index'],
                'tx_hash': tx['tx_hash'],
                'dispense_index': dispense_index,
                'block_index': tx['block_index'],
                'source': tx['destination'],
                'destination': tx['source'],
                'asset': dispenser['asset'],
                'dispense_quantity': actually_given,
                'dispenser_tx_hash': dispenser['tx_hash']
            }
            sql = 'INSERT INTO dispenses(tx_index, dispense_index, tx_hash, block_index, source, destination, asset, dispense_quantity, dispenser_tx_hash) \
                VALUES(:tx_index, :dispense_index, :tx_hash, :block_index, :source, :destination, :asset, :dispense_quantity, :dispenser_tx_hash);'
            cursor.execute(sql, bindings)
            dispense_index += 1

    cursor.close()
def validate (db, source, asset_, give_quantity, escrow_quantity, mainchainrate, status, block_index):
    problems = []
    asset_id = None

    if not util.enabled('dispensers'):
        problems.append('not activated yet.')

    if asset_ == config.BTC:
        problems.append('cannot dispense %s' % config.BTC)
        return None, problems

    # resolve subassets
    asset = util.resolve_subasset_longname(db, asset_)

    if status == STATUS_OPEN:
        if not issuance.is_vendable(db, asset):
            problems.append('asset "%s" is not vendable' % asset_)
        if give_quantity <= 0:
            problems.append('give_quantity must be positive')
        if mainchainrate <= 0:
            problems.append('mainchainrate must be positive')
        if escrow_quantity < give_quantity:
            problems.append('escrow_quantity must be greater or equal than give_quantity')
    elif not(status == STATUS_CLOSED):
        problems.append('invalid status %i' % status)

    cursor = db.cursor()
    cursor.execute('''SELECT quantity FROM balances \
                      WHERE address = ? and asset = ?''', (source,asset,))
    available = cursor.fetchall()
    if len(available) == 0:
        problems.append('address doesn\'t has the asset %s' % asset_)
    elif len(available) >= 1 and available[0]['quantity'] < escrow_quantity:
        problems.append('address doesn\'t has enough balance of %s (%i < %i)' % (asset_, available[0]['quantity'], escrow_quantity))
    else:
        cursor.execute('''SELECT * FROM dispensers \
                          WHERE source = ? AND asset = ? AND status=?''', (source, asset, STATUS_OPEN))
        open_dispensers = cursor.fetchall()
        if status == STATUS_OPEN:
            if len(open_dispensers) > 0 and open_dispensers[0]['satoshirate'] != mainchainrate:
                problems.append('address has a dispenser already opened for asset %s with a different mainchainrate' % asset_)
            if len(open_dispensers) > 0 and open_dispensers[0]['give_quantity'] != give_quantity:
                problems.append('address has a dispenser already opened for asset %s with a different give_quantity' % asset_)
        elif status == STATUS_CLOSED:
            if len(open_dispensers) == 0:
                problems.append('address doesnt has an open dispenser for asset %s' % asset_)

        if len(problems) == 0:
            asset_id = util.generate_asset_id(asset, block_index)
            if asset_id == 0:
                problems.append('cannot dispense %s' % asset_)  # How can we test this on a test vector?

    if give_quantity > config.MAX_INT or escrow_quantity > config.MAX_INT or mainchainrate > config.MAX_INT:
        problems.append('integer overflow')

    if len(problems) > 0:
        return None, problems
    else:
        return asset_id, None
def dispense(db, tx):
    cursor = db.cursor()

    cursor.execute('SELECT * FROM dispensers WHERE source=:source AND status=:status', {
        'source': tx['destination'],
        'status': STATUS_OPEN
    })
    dispensers = cursor.fetchall()

    dispense_logs = []
    for dispenser in dispensers:
        # They should be rejected in `validate()`.
        assert dispenser['satoshirate'] > 0
        assert dispenser['give_quantity'] > 0

        must_give = int(floor(tx['btc_amount'] / dispenser['satoshirate']))
        remaining = int(floor(dispenser['give_remaining'] / dispenser['give_quantity']))
        actually_given = min(must_give, remaining) * dispenser['give_quantity']
        give_remaining = dispenser['give_remaining'] - actually_given

        assert give_remaining >= 0

        # Skip dispense if quantity is 0
        if util.enabled('zero_quantity_value_adjustment_1') and actually_given==0:
            continue

        util.credit(db, tx['source'], dispenser['asset'], actually_given, action='dispense', event=tx['tx_hash'])

        dispense_logs.append({
            "tx_index": tx['tx_index'],
            "tx_hash": tx['tx_hash'],
            "block_index": tx['block_index'],
            "source": tx['source'],
            "destination": tx['destination'],
            "asset": dispenser['asset'],
            "must_give": must_give,
            "remaining": remaining,
            "actually_given": actually_given,
            "satoshirate": dispenser['satoshirate'],
            "dispenser_tx_hash": dispenser['tx_hash']
        })

        dispenser['give_remaining'] = give_remaining
        if give_remaining < dispenser['give_quantity']:
            # close the dispenser
            dispenser['give_remaining'] = 0
            if give_remaining > 0:
                # return the remaining to the owner
                util.credit(db, dispenser['source'], dispenser['asset'], give_remaining, action='dispenser close', event=tx['tx_hash'])
            dispenser['status'] = STATUS_CLOSED

        dispenser['block_index'] = tx['block_index']
        dispenser['prev_status'] = STATUS_OPEN
        cursor.execute('UPDATE DISPENSERS SET give_remaining=:give_remaining, status=:status \
            WHERE source=:source AND asset=:asset AND satoshirate=:satoshirate AND give_quantity=:give_quantity AND status=:prev_status', dispenser)

    for log in dispense_logs:
        cursor.execute('INSERT INTO DISPENSES VALUES (:tx_index, :tx_hash, :block_index, :source, :destination, :asset, :must_give, :remaining, :actually_given, :satoshirate, :dispenser_tx_hash)', log)
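# Worked example of the dispenser arithmetic above (illustrative numbers):
# a dispenser with satoshirate=1000, give_quantity=5, give_remaining=12
# receiving a payment of btc_amount=2500 yields
#   must_give      = floor(2500 / 1000) = 2
#   remaining      = floor(12 / 5)      = 2
#   actually_given = min(2, 2) * 5      = 10
#   give_remaining = 12 - 10            = 2
# Since 2 < give_quantity (5), the dispenser is closed and the leftover
# 2 units are credited back to its owner.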
def compose (db, source, destination, asset, quantity, memo=None, memo_is_hex=False, use_enhanced_send=None):
    # special case - enhanced_send replaces send by default when it is enabled
    #   but it can be explicitly disabled with an API parameter
    if util.enabled('enhanced_sends'):
        if use_enhanced_send is None or use_enhanced_send == True:
            return enhanced_send.compose(db, source, destination, asset, quantity, memo, memo_is_hex)
    elif memo is not None or use_enhanced_send == True:
        raise exceptions.ComposeError('enhanced sends are not enabled')

    return send1.compose(db, source, destination, asset, quantity)
def expire (db, block_index):
    cursor = db.cursor()

    # Expire orders and give refunds for the quantity give_remaining (if non-zero; if not BTC).
    cursor.execute('''SELECT * FROM orders \
                      WHERE (status = ? AND expire_index < ?)''', ('open', block_index))
    orders = list(cursor)
    for order in orders:
        cancel_order(db, order, 'expired', block_index)

    # Expire order_matches for BTC with no BTC.
    cursor.execute('''SELECT * FROM order_matches \
                      WHERE (status = ? and match_expire_index < ?)''', ('pending', block_index))
    order_matches = list(cursor)
    for order_match in order_matches:
        cancel_order_match(db, order_match, 'expired', block_index)

        # Expire btc sell order if match expires
        if util.enabled('btc_sell_expire_on_match_expire'):
            # Check for other pending order matches involving either tx0_hash or tx1_hash
            bindings = {
                'status': 'pending',
                'tx0_hash': order_match['tx0_hash'],
                'tx1_hash': order_match['tx1_hash']
            }
            sql = 'select * from order_matches where status = :status and ((tx0_hash in (:tx0_hash, :tx1_hash)) or ((tx1_hash in (:tx0_hash, :tx1_hash))))'
            cursor.execute(sql, bindings)
            order_matches_pending = cursor.fetchall()
            # Set BTC sell order status as expired only if there are no pending order matches
            if len(order_matches_pending) == 0:
                if order_match['backward_asset'] == "BTC" and order_match['status'] == "expired":
                    cursor.execute('''SELECT * FROM orders \
                                      WHERE tx_hash = ?''', (order_match['tx1_hash'],))
                    cancel_order(db, list(cursor)[0], 'expired', block_index)
                if order_match['forward_asset'] == "BTC" and order_match['status'] == "expired":
                    cursor.execute('''SELECT * FROM orders \
                                      WHERE tx_hash = ?''', (order_match['tx0_hash'],))
                    cancel_order(db, list(cursor)[0], 'expired', block_index)

    if block_index >= 315000 or config.TESTNET or config.REGTEST:   # Protocol change.
        # Re‐match.
        for order_match in order_matches:
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (order_match['tx0_hash'],))
            match(db, list(cursor)[0], block_index)
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (order_match['tx1_hash'],))
            match(db, list(cursor)[0], block_index)

    cursor.close()
def get_tx_info(tx_hex, block_parser=None, block_index=None):
    """Get the transaction info. Calls one of two subfunctions depending on signature type."""
    if not block_index:
        block_index = util.CURRENT_BLOCK_INDEX

    try:
        if util.enabled('multisig_addresses', block_index=block_index):   # Protocol change.
            tx_info = get_tx_info2(tx_hex, block_parser=block_parser)
        else:
            tx_info = get_tx_info1(tx_hex, block_index, block_parser=block_parser)
    except (DecodeError, BTCOnlyError) as e:
        # NOTE: For debugging, logger.debug('Could not decode: ' + str(e))
        tx_info = b'', None, None, None, None

    return tx_info
def extract_pubkeys(pub):
    """Assume pubkey if not pubkeyhash. (Check validity later.)"""
    pubkeys = []
    if is_multisig(pub):
        _, pubs, _ = extract_array(pub)
        for pub in pubs:
            if not is_pubkeyhash(pub):
                pubkeys.append(pub)
    elif is_p2sh(pub):
        pass
    elif util.enabled('segwit_support') and is_bech32(pub):
        pass
    else:
        if not is_pubkeyhash(pub):
            pubkeys.append(pub)

    return pubkeys
def get_tx_list(block):
    raw_transactions = {}
    tx_hash_list = []

    for ctx in block.vtx:
        if util.enabled('correct_segwit_txids'):
            hsh = ctx.GetTxid()
        else:
            hsh = ctx.GetHash()
        tx_hash = bitcoinlib.core.b2lx(hsh)
        raw = ctx.serialize()

        tx_hash_list.append(tx_hash)
        raw_transactions[tx_hash] = bitcoinlib.core.b2x(raw)

    return (tx_hash_list, raw_transactions)
def validate(db, source, destination, asset, quantity, block_index):

    try:
        util.get_asset_id(db, asset, block_index)
    except AssetError:
        raise ValidateError('asset invalid')

    try:
        script.validate(source)
    except AddressError:
        raise ValidateError('source address invalid')

    try:
        script.validate(destination)
    except AddressError:
        raise ValidateError('destination address invalid')

    if asset == config.BTC:
        raise ValidateError('cannot send {}'.format(config.BTC))

    if type(quantity) != int:
        raise ValidateError('quantity not integer')

    if quantity > config.MAX_INT:
        raise ValidateError('quantity too large')

    if quantity <= 0:
        raise ValidateError('quantity non‐positive')

    if util.get_balance(db, source, asset) < quantity:
        raise BalanceError('balance insufficient')

    if util.enabled('options_require_memo'):
        # Check destination address options
        cursor = db.cursor()
        try:
            results = cursor.execute('SELECT options FROM addresses WHERE address=?', (destination,))
            if results:
                result = results.fetchone()
                if result and result['options'] & config.ADDRESS_OPTION_REQUIRE_MEMO:
                    raise ValidateError('destination requires memo')
        finally:
            cursor.close()
def validate(db, source, destination, asset, quantity, memo_bytes, block_index):
    problems = []

    if asset == config.BTC:
        problems.append('cannot send {}'.format(config.BTC))

    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    if quantity == 0:
        problems.append('zero quantity')

    # For SQLite3
    if quantity > config.MAX_INT:
        problems.append('integer overflow')

    # destination is always required
    if not destination:
        problems.append('destination is required')

    # check memo
    if memo_bytes is not None and len(memo_bytes) > MAX_MEMO_LENGTH:
        problems.append('memo is too long')

    if util.enabled('options_require_memo'):
        cursor = db.cursor()
        try:
            results = cursor.execute('SELECT options FROM addresses WHERE address=?', (destination,))
            if results:
                result = results.fetchone()
                if result and result['options'] & config.ADDRESS_OPTION_REQUIRE_MEMO:
                    if memo_bytes is None or (len(memo_bytes) == 0):
                        problems.append('destination requires memo')
        finally:
            cursor.close()

    return problems
def unpack(packed_data, block_index=None):
    message_type_id = None
    message_remainder = None

    if len(packed_data) > 1:
        # try to read 1 byte first
        if util.enabled('short_tx_type_id', block_index):
            message_type_id = struct.unpack(config.SHORT_TXTYPE_FORMAT, packed_data[:1])[0]
            if message_type_id > 0:
                message_remainder = packed_data[1:]
                return (message_type_id, message_remainder)

    # First message byte was 0. We will read 4 bytes
    if len(packed_data) > 4:
        message_type_id = struct.unpack(config.TXTYPE_FORMAT, packed_data[:4])[0]
        message_remainder = packed_data[4:]

    return (message_type_id, message_remainder)
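# Hedged round-trip sketch for pack()/unpack(), assuming the usual formats
# (SHORT_TXTYPE_FORMAT = 'B', TXTYPE_FORMAT = '>I') and 'short_tx_type_id'
# enabled; payload contents are illustrative:
#
#   unpack(struct.pack('B', 30) + b'payload')    -> (30, b'payload')   # short ID, 1 byte consumed
#   unpack(struct.pack('>I', 30) + b'payload')   -> (30, b'payload')   # first byte 0, 4 bytes consumed
#
# If the data is too short for either branch, the function falls through and
# message_remainder stays None.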
def compose(db, source, destination, asset, quantity, memo=None, memo_is_hex=False, use_enhanced_send=None):
    # special case - enhanced_send replaces send by default when it is enabled
    #   but it can be explicitly disabled with an API parameter
    if util.enabled('enhanced_sends'):
        if use_enhanced_send is None or use_enhanced_send == True:
            return enhanced_send.compose(db, source, destination, asset, quantity, memo, memo_is_hex)
    elif memo is not None or use_enhanced_send == True:
        raise exceptions.ComposeError('enhanced sends are not enabled')

    return send1.compose(db, source, destination, asset, quantity)
def validate (db, source, destination, asset, quantity, memo_bytes, block_index):
    problems = []

    if asset == config.BTC:
        problems.append('cannot send {}'.format(config.BTC))

    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    if quantity == 0:
        problems.append('zero quantity')

    # For SQLite3
    if quantity > config.MAX_INT:
        problems.append('integer overflow')

    # destination is always required
    if not destination:
        problems.append('destination is required')

    # check memo
    if memo_bytes is not None and len(memo_bytes) > MAX_MEMO_LENGTH:
        problems.append('memo is too long')

    if util.enabled('options_require_memo'):
        cursor = db.cursor()
        try:
            results = cursor.execute('SELECT options FROM addresses WHERE address=?', (destination,))
            if results:
                result = results.fetchone()
                if result and util.active_options(result['options'], config.ADDRESS_OPTION_REQUIRE_MEMO):
                    if memo_bytes is None or (len(memo_bytes) == 0):
                        problems.append('destination requires memo')
        finally:
            cursor.close()

    return problems
def validate(db, source, possible_moves, wager, move_random_hash, expiration, block_index):
    problems = []

    if util.enabled('disable_rps'):
        problems.append('rps disabled')

    if not isinstance(possible_moves, int):
        problems.append('possible_moves must be a integer')
        return problems
    if not isinstance(wager, int):
        problems.append('wager must be in satoshis')
        return problems
    if not isinstance(expiration, int):
        problems.append('expiration must be expressed as an integer block delta')
        return problems

    if not all(c in string.hexdigits for c in move_random_hash):
        problems.append('move_random_hash must be an hexadecimal string')
        return problems
    move_random_hash_bytes = binascii.unhexlify(move_random_hash)

    if possible_moves < 3:
        problems.append('possible moves must be at least 3')
    if possible_moves % 2 == 0:
        problems.append('possible moves must be odd')
    if wager <= 0:
        problems.append('non‐positive wager')
    if expiration < 0:
        problems.append('negative expiration')
    if expiration == 0 and not (block_index >= 317500 or config.TESTNET or config.REGTEST):   # Protocol change.
        problems.append('zero expiration')
    if expiration > config.MAX_EXPIRATION:
        problems.append('expiration overflow')
    if len(move_random_hash_bytes) != 32:
        problems.append('move_random_hash must be 32 bytes in hexadecimal format')

    return problems
def make_pubkeyhash(address):
    """Create a new PubKeyHash."""
    if is_multisig(address):
        signatures_required, pubs, signatures_possible = extract_array(address)
        pubkeyhashes = []
        for pub in pubs:
            if is_pubkeyhash(pub):
                pubkeyhash = pub
            else:
                pubkeyhash = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(pub, 'utf-8')))
            pubkeyhashes.append(pubkeyhash)
        pubkeyhash_address = construct_array(signatures_required, pubkeyhashes, signatures_possible)
    else:
        if util.enabled('segwit_support') and is_bech32(address):
            pubkeyhash_address = address  # Some bech32 addresses are valid base58 data
        elif is_pubkeyhash(address):
            pubkeyhash_address = address
        elif is_p2sh(address):
            pubkeyhash_address = address
        else:
            pubkeyhash_address = pubkey_to_pubkeyhash(binascii.unhexlify(bytes(address, 'utf-8')))
    return pubkeyhash_address
def validate(db, source, timestamp, value, fee_fraction_int, text, block_index):
    problems = []

    # For SQLite3
    if timestamp > config.MAX_INT or value > config.MAX_INT or fee_fraction_int > config.MAX_INT:
        problems.append("integer overflow")

    if util.enabled("max_fee_fraction"):
        if fee_fraction_int >= config.UNIT:
            problems.append("fee fraction greater than or equal to 1")
    else:
        if fee_fraction_int > 4294967295:
            problems.append("fee fraction greater than 42.94967295")

    if timestamp < 0:
        problems.append("negative timestamp")

    if not source:
        problems.append("null source address")

    # Check previous broadcast in this feed.
    cursor = db.cursor()
    broadcasts = list(
        cursor.execute(
            """SELECT * FROM broadcasts \
               WHERE (status = ? AND source = ?)
               ORDER BY tx_index ASC""",
            ("valid", source),
        )
    )
    cursor.close()
    if broadcasts:
        last_broadcast = broadcasts[-1]
        if last_broadcast["locked"]:
            problems.append("locked feed")
        elif timestamp <= last_broadcast["timestamp"]:
            problems.append("feed timestamps not monotonically increasing")

    if not (block_index >= 317500 or config.TESTNET):  # Protocol change.
        if len(text) > 52:
            problems.append("text too long")

    return problems
def compose (db, source, timestamp, value, fee_fraction, text):
    # Store the fee fraction as an integer.
    fee_fraction_int = int(fee_fraction * 1e8)

    problems = validate(db, source, timestamp, value, fee_fraction_int, text, util.CURRENT_BLOCK_INDEX)
    if problems:
        raise exceptions.ComposeError(problems)

    data = struct.pack(config.TXTYPE_FORMAT, ID)

    # always use custom length byte instead of problematic usage of 52p format and make sure to encode('utf-8') for length
    if util.enabled('broadcast_pack_text'):
        data += struct.pack(FORMAT, timestamp, value, fee_fraction_int)
        data += VarIntSerializer.serialize(len(text.encode('utf-8')))
        data += text.encode('utf-8')
    else:
        if len(text) <= 52:
            curr_format = FORMAT + '{}p'.format(len(text) + 1)
        else:
            curr_format = FORMAT + '{}s'.format(len(text))

        data += struct.pack(curr_format, timestamp, value, fee_fraction_int, text.encode('utf-8'))

    return (source, [], data)
def compose(db, source, asset_dest_quant_list, memo, memo_is_hex):
    cursor = db.cursor()

    out_balances = util.accumulate([(t[0], t[2]) for t in asset_dest_quant_list])
    for (asset, quantity) in out_balances:
        if util.enabled('mpma_subasset_support'):
            # resolve subassets
            asset = util.resolve_subasset_longname(db, asset)

        if not isinstance(quantity, int):
            raise exceptions.ComposeError('quantities must be an int (in satoshis) for {}'.format(asset))

        balances = list(cursor.execute('''SELECT * FROM balances \
                                          WHERE (address = ? AND asset = ?)''', (source, asset)))
        if not balances or balances[0]['quantity'] < quantity:
            raise exceptions.ComposeError('insufficient funds for {}'.format(asset))

    block_index = util.CURRENT_BLOCK_INDEX

    cursor.close()

    problems = validate(db, source, asset_dest_quant_list, block_index)
    if problems:
        raise exceptions.ComposeError(problems)

    data = message_type.pack(ID)
    data += _encode_mpmaSend(db, asset_dest_quant_list, block_index, memo=memo, memo_is_hex=memo_is_hex)

    return (source, [], data)
def validate (db, source, possible_moves, wager, move_random_hash, expiration, block_index):
    problems = []

    if util.enabled('disable_rps'):
        problems.append('rps disabled')

    if not isinstance(possible_moves, int):
        problems.append('possible_moves must be a integer')
        return problems
    if not isinstance(wager, int):
        problems.append('wager must be in satoshis')
        return problems
    if not isinstance(expiration, int):
        problems.append('expiration must be expressed as an integer block delta')
        return problems

    if not all(c in string.hexdigits for c in move_random_hash):
        problems.append('move_random_hash must be an hexadecimal string')
        return problems
    move_random_hash_bytes = binascii.unhexlify(move_random_hash)

    if possible_moves < 3:
        problems.append('possible moves must be at least 3')
    if possible_moves % 2 == 0:
        problems.append('possible moves must be odd')
    if wager <= 0:
        problems.append('non‐positive wager')
    if expiration < 0:
        problems.append('negative expiration')
    if expiration == 0 and not (block_index >= 317500 or config.TESTNET or config.REGTEST):   # Protocol change.
        problems.append('zero expiration')
    if expiration > config.MAX_EXPIRATION:
        problems.append('expiration overflow')
    if len(move_random_hash_bytes) != 32:
        problems.append('move_random_hash must be 32 bytes in hexadecimal format')

    return problems
def validate (db, source, quantity_per_unit, asset, dividend_asset, block_index):
    cursor = db.cursor()
    problems = []

    if asset == config.BTC:
        problems.append('cannot pay dividends to holders of {}'.format(config.BTC))
    if asset == config.XCP:
        if (not block_index >= 317500) or block_index >= 320000 or config.TESTNET or config.REGTEST:   # Protocol change.
            problems.append('cannot pay dividends to holders of {}'.format(config.XCP))

    if quantity_per_unit <= 0:
        problems.append('non‐positive quantity per unit')

    # For SQLite3
    if quantity_per_unit > config.MAX_INT:
        problems.append('integer overflow')

    # Examine asset.
    issuances = list(cursor.execute('''SELECT * FROM issuances \
                                       WHERE (status = ? AND asset = ?)
                                       ORDER BY tx_index ASC''', ('valid', asset)))
    if not issuances:
        problems.append('no such asset, {}.'.format(asset))
        return None, None, problems, 0
    divisible = issuances[0]['divisible']

    # Only issuer can pay dividends.
    if block_index >= 320000 or config.TESTNET or config.REGTEST:   # Protocol change.
        if issuances[-1]['issuer'] != source:
            problems.append('only issuer can pay dividends')

    # Examine dividend asset.
    if dividend_asset in (config.BTC, config.XCP):
        dividend_divisible = True
    else:
        issuances = list(cursor.execute('''SELECT * FROM issuances \
                                           WHERE (status = ? AND asset = ?)''', ('valid', dividend_asset)))
        if not issuances:
            problems.append('no such dividend asset, {}.'.format(dividend_asset))
            return None, None, problems, 0
        dividend_divisible = issuances[0]['divisible']

    # Calculate dividend quantities.
    exclude_empty = False
    if util.enabled('zero_quantity_value_adjustment_1'):
        exclude_empty = True
    holders = util.holders(db, asset, exclude_empty)

    outputs = []
    addresses = []
    dividend_total = 0
    for holder in holders:

        if block_index < 294500 and not (config.TESTNET or config.REGTEST):   # Protocol change.
            if holder['escrow']:
                continue

        address = holder['address']
        address_quantity = holder['address_quantity']
        if block_index >= 296000 or config.TESTNET or config.REGTEST:   # Protocol change.
            if address == source:
                continue

        dividend_quantity = address_quantity * quantity_per_unit

        if divisible:
            dividend_quantity /= config.UNIT
        if not util.enabled('nondivisible_dividend_fix') and not dividend_divisible:
            dividend_quantity /= config.UNIT   # Pre-fix behaviour

        if dividend_asset == config.BTC and dividend_quantity < config.DEFAULT_MULTISIG_DUST_SIZE:
            continue   # A bit hackish.

        dividend_quantity = int(dividend_quantity)

        outputs.append({'address': address, 'address_quantity': address_quantity, 'dividend_quantity': dividend_quantity})
        addresses.append(address)
        dividend_total += dividend_quantity

    if not dividend_total:
        problems.append('zero dividend')

    if dividend_asset != config.BTC:
        dividend_balances = list(cursor.execute('''SELECT * FROM balances \
                                                   WHERE (address = ? AND asset = ?)''', (source, dividend_asset)))
        if not dividend_balances or dividend_balances[0]['quantity'] < dividend_total:
            problems.append('insufficient funds ({})'.format(dividend_asset))

    fee = 0
    if not problems and dividend_asset != config.BTC:
        holder_count = len(set(addresses))
        if block_index >= 330000 or config.TESTNET or config.REGTEST:   # Protocol change.
            fee = int(0.0002 * config.UNIT * holder_count)

    if fee:
        balances = list(cursor.execute('''SELECT * FROM balances \
                                          WHERE (address = ? AND asset = ?)''', (source, config.XCP)))
        if not balances or balances[0]['quantity'] < fee:
            problems.append('insufficient funds ({})'.format(config.XCP))

    if not problems and dividend_asset == config.XCP:
        total_cost = dividend_total + fee
        if not dividend_balances or dividend_balances[0]['quantity'] < total_cost:
            problems.append('insufficient funds ({})'.format(dividend_asset))

    # For SQLite3
    if fee > config.MAX_INT or dividend_total > config.MAX_INT:
        problems.append('integer overflow')

    cursor.close()

    return dividend_total, outputs, problems, fee
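# Worked example of the per-holder arithmetic above (illustrative numbers,
# assuming config.UNIT = 10**8): for a divisible asset, a holder with
# address_quantity = 150_000_000 and quantity_per_unit = 5_000_000 receives
#   dividend_quantity = 150_000_000 * 5_000_000 / 100_000_000 = 7_500_000
# The holder-count fee, when it applies, is
#   int(0.0002 * config.UNIT * holder_count)
# i.e. 20_000 units of XCP per unique holder.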
def parse(db, tx, message):
    cursor = db.cursor()

    # Unpack message.
    try:
        if util.enabled("broadcast_pack_text"):
            timestamp, value, fee_fraction_int, rawtext = struct.unpack(FORMAT + "{}s".format(len(message) - LENGTH), message)
            textlen = VarIntSerializer.deserialize(rawtext)
            text = rawtext[-textlen:]

            assert len(text) == textlen
        else:
            if len(message) - LENGTH <= 52:
                curr_format = FORMAT + "{}p".format(len(message) - LENGTH)
            else:
                curr_format = FORMAT + "{}s".format(len(message) - LENGTH)

            timestamp, value, fee_fraction_int, text = struct.unpack(curr_format, message)

        try:
            text = text.decode("utf-8")
        except UnicodeDecodeError:
            text = ""
        status = "valid"
    except (struct.error) as e:
        timestamp, value, fee_fraction_int, text = 0, None, 0, None
        status = "invalid: could not unpack"

    if status == "valid":
        # For SQLite3
        timestamp = min(timestamp, config.MAX_INT)
        value = min(value, config.MAX_INT)

        problems = validate(db, tx["source"], timestamp, value, fee_fraction_int, text, tx["block_index"])
        if problems:
            status = "invalid: " + "; ".join(problems)

    # Lock?
    lock = False
    if text and text.lower() == "lock":
        lock = True
        timestamp, value, fee_fraction_int, text = 0, None, None, None
    else:
        lock = False

    # Add parsed transaction to message-type–specific table.
    bindings = {
        "tx_index": tx["tx_index"],
        "tx_hash": tx["tx_hash"],
        "block_index": tx["block_index"],
        "source": tx["source"],
        "timestamp": timestamp,
        "value": value,
        "fee_fraction_int": fee_fraction_int,
        "text": text,
        "locked": lock,
        "status": status,
    }
    if "integer overflow" not in status:
        sql = "insert into broadcasts values(:tx_index, :tx_hash, :block_index, :source, :timestamp, :value, :fee_fraction_int, :text, :locked, :status)"
        cursor.execute(sql, bindings)
    else:
        logger.warn("Not storing [broadcast] tx [%s]: %s" % (tx["tx_hash"], status))
        logger.debug("Bindings: %s" % (json.dumps(bindings),))

    # stop processing if broadcast is invalid for any reason
    if util.enabled("broadcast_invalid_check") and status != "valid":
        return

    # Negative values (default to ignore).
    if value is None or value < 0:
        # Cancel Open Bets?
        if value == -2:
            cursor.execute(
                """SELECT * FROM bets \
                   WHERE (status = ? AND feed_address = ?)""",
                ("open", tx["source"]),
            )
            for i in list(cursor):
                bet.cancel_bet(db, i, "dropped", tx["block_index"])
        # Cancel Pending Bet Matches?
        if value == -3:
            cursor.execute(
                """SELECT * FROM bet_matches \
                   WHERE (status = ? AND feed_address = ?)""",
                ("pending", tx["source"]),
            )
            for bet_match in list(cursor):
                bet.cancel_bet_match(db, bet_match, "dropped", tx["block_index"])
        cursor.close()
        return

    # stop processing if broadcast is invalid for any reason
    # @TODO: remove this check once broadcast_invalid_check has been activated
    if util.enabled("max_fee_fraction") and status != "valid":
        return

    # Handle bet matches that use this feed.
    cursor.execute(
        """SELECT * FROM bet_matches \
           WHERE (status=? AND feed_address=?)
           ORDER BY tx1_index ASC, tx0_index ASC""",
        ("pending", tx["source"]),
    )
    for bet_match in cursor.fetchall():
        broadcast_bet_match_cursor = db.cursor()
        bet_match_id = util.make_id(bet_match["tx0_hash"], bet_match["tx1_hash"])
        bet_match_status = None

        # Calculate total funds held in escrow and total fee to be paid if
        # the bet match is settled. Escrow less fee is amount to be paid back
        # to betters.
        total_escrow = bet_match["forward_quantity"] + bet_match["backward_quantity"]
        fee_fraction = fee_fraction_int / config.UNIT
        fee = int(fee_fraction * total_escrow)  # Truncate.
        escrow_less_fee = total_escrow - fee

        # Get known bet match type IDs.
        cfd_type_id = util.BET_TYPE_ID["BullCFD"] + util.BET_TYPE_ID["BearCFD"]
        equal_type_id = util.BET_TYPE_ID["Equal"] + util.BET_TYPE_ID["NotEqual"]

        # Get the bet match type ID of this bet match.
        bet_match_type_id = bet_match["tx0_bet_type"] + bet_match["tx1_bet_type"]

        # Contract for difference, with determinate settlement date.
        if bet_match_type_id == cfd_type_id:

            # Recognise tx0, tx1 as the bull, bear (in the right direction).
            if bet_match["tx0_bet_type"] < bet_match["tx1_bet_type"]:
                bull_address = bet_match["tx0_address"]
                bear_address = bet_match["tx1_address"]
                bull_escrow = bet_match["forward_quantity"]
                bear_escrow = bet_match["backward_quantity"]
            else:
                bull_address = bet_match["tx1_address"]
                bear_address = bet_match["tx0_address"]
                bull_escrow = bet_match["backward_quantity"]
                bear_escrow = bet_match["forward_quantity"]

            leverage = Fraction(bet_match["leverage"], 5040)
            initial_value = bet_match["initial_value"]

            bear_credit = bear_escrow - (value - initial_value) * leverage * config.UNIT
            bull_credit = escrow_less_fee - bear_credit
            bear_credit = round(bear_credit)
            bull_credit = round(bull_credit)

            # Liquidate, as necessary.
            if bull_credit >= escrow_less_fee or bull_credit <= 0:
                if bull_credit >= escrow_less_fee:
                    bull_credit = escrow_less_fee
                    bear_credit = 0
                    bet_match_status = "settled: liquidated for bull"
                    util.credit(db, bull_address, config.XCP, bull_credit, action="bet {}".format(bet_match_status), event=tx["tx_hash"])
                elif bull_credit <= 0:
                    bull_credit = 0
                    bear_credit = escrow_less_fee
                    bet_match_status = "settled: liquidated for bear"
                    util.credit(db, bear_address, config.XCP, bear_credit, action="bet {}".format(bet_match_status), event=tx["tx_hash"])

                # Pay fee to feed.
                util.credit(db, bet_match["feed_address"], config.XCP, fee, action="feed fee", event=tx["tx_hash"])

                # For logging purposes.
                bindings = {
                    "bet_match_id": bet_match_id,
                    "bet_match_type_id": bet_match_type_id,
                    "block_index": tx["block_index"],
                    "settled": False,
                    "bull_credit": bull_credit,
                    "bear_credit": bear_credit,
                    "winner": None,
                    "escrow_less_fee": None,
                    "fee": fee,
                }
                sql = "insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)"
                cursor.execute(sql, bindings)

            # Settle (if not liquidated).
            elif timestamp >= bet_match["deadline"]:
                bet_match_status = "settled"

                util.credit(db, bull_address, config.XCP, bull_credit, action="bet {}".format(bet_match_status), event=tx["tx_hash"])
                util.credit(db, bear_address, config.XCP, bear_credit, action="bet {}".format(bet_match_status), event=tx["tx_hash"])

                # Pay fee to feed.
                util.credit(db, bet_match["feed_address"], config.XCP, fee, action="feed fee", event=tx["tx_hash"])

                # For logging purposes.
                bindings = {
                    "bet_match_id": bet_match_id,
                    "bet_match_type_id": bet_match_type_id,
                    "block_index": tx["block_index"],
                    "settled": True,
                    "bull_credit": bull_credit,
                    "bear_credit": bear_credit,
                    "winner": None,
                    "escrow_less_fee": None,
                    "fee": fee,
                }
                sql = "insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)"
                cursor.execute(sql, bindings)

        # Equal[/NotEqual] bet.
        elif bet_match_type_id == equal_type_id and timestamp >= bet_match["deadline"]:

            # Recognise tx0, tx1 as the bull, bear (in the right direction).
            if bet_match["tx0_bet_type"] < bet_match["tx1_bet_type"]:
                equal_address = bet_match["tx0_address"]
                notequal_address = bet_match["tx1_address"]
            else:
                equal_address = bet_match["tx1_address"]
                notequal_address = bet_match["tx0_address"]

            # Decide who won, and credit appropriately.
            if value == bet_match["target_value"]:
                winner = "Equal"
                bet_match_status = "settled: for equal"
                util.credit(db, equal_address, config.XCP, escrow_less_fee, action="bet {}".format(bet_match_status), event=tx["tx_hash"])
            else:
                winner = "NotEqual"
                bet_match_status = "settled: for notequal"
                util.credit(db, notequal_address, config.XCP, escrow_less_fee, action="bet {}".format(bet_match_status), event=tx["tx_hash"])

            # Pay fee to feed.
            util.credit(db, bet_match["feed_address"], config.XCP, fee, action="feed fee", event=tx["tx_hash"])

            # For logging purposes.
            bindings = {
                "bet_match_id": bet_match_id,
                "bet_match_type_id": bet_match_type_id,
                "block_index": tx["block_index"],
                "settled": None,
                "bull_credit": None,
                "bear_credit": None,
                "winner": winner,
                "escrow_less_fee": escrow_less_fee,
                "fee": fee,
            }
            sql = "insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)"
            cursor.execute(sql, bindings)

        # Update the bet match’s status.
        if bet_match_status:
            bindings = {
                "status": bet_match_status,
                "bet_match_id": util.make_id(bet_match["tx0_hash"], bet_match["tx1_hash"]),
            }
            sql = "update bet_matches set status = :status where id = :bet_match_id"
            cursor.execute(sql, bindings)

            log.message(db, tx["block_index"], "update", "bet_matches", bindings)

        broadcast_bet_match_cursor.close()

    cursor.close()
def validate (db, source, destination, asset, quantity, divisible, callable_, call_date, call_price, description, block_index):
    problems = []
    fee = 0

    if asset in (config.BTC, config.XCP):
        problems.append('cannot issue {} or {}'.format(config.BTC, config.XCP))

    if call_date is None:
        call_date = 0
    if call_price is None:
        call_price = 0.0
    if description is None:
        description = ""
    if divisible is None:
        divisible = True

    if isinstance(call_price, int):
        call_price = float(call_price)
    #^ helps especially with calls from JS‐based clients, where parseFloat(15) returns 15 (not 15.0), which json takes as an int

    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return call_date, call_price, problems, fee, description, divisible, None
    if call_date and not isinstance(call_date, int):
        problems.append('call_date must be epoch integer')
        return call_date, call_price, problems, fee, description, divisible, None
    if call_price and not isinstance(call_price, float):
        problems.append('call_price must be a float')
        return call_date, call_price, problems, fee, description, divisible, None

    if quantity < 0:
        problems.append('negative quantity')
    if call_price < 0:
        problems.append('negative call price')
    if call_date < 0:
        problems.append('negative call date')

    # Callable, or not.
    if not callable_:
        if block_index >= 312500 or config.TESTNET:   # Protocol change.
            call_date = 0
            call_price = 0.0
        elif block_index >= 310000:                   # Protocol change.
            if call_date:
                problems.append('call date for non‐callable asset')
            if call_price:
                problems.append('call price for non‐callable asset')

    # Valid re-issuance?
    cursor = db.cursor()
    cursor.execute('''SELECT * FROM issuances \
                      WHERE (status = ? AND asset = ?)
                      ORDER BY tx_index ASC''', ('valid', asset))
    issuances = cursor.fetchall()
    cursor.close()
    if issuances:
        reissuance = True
        last_issuance = issuances[-1]
        if last_issuance['issuer'] != source:
            problems.append('issued by another address')
        if bool(last_issuance['divisible']) != bool(divisible):
            problems.append('cannot change divisibility')
        if bool(last_issuance['callable']) != bool(callable_):
            problems.append('cannot change callability')
        if last_issuance['call_date'] > call_date and (call_date != 0 or (block_index < 312500 and not config.TESTNET)):
            problems.append('cannot advance call date')
        if last_issuance['call_price'] > call_price:
            problems.append('cannot reduce call price')
        if last_issuance['locked'] and quantity:
            problems.append('locked asset and non‐zero quantity')
    else:
        reissuance = False
        if description.lower() == 'lock':
            problems.append('cannot lock a non‐existent asset')
        if destination:
            problems.append('cannot transfer a non‐existent asset')

    # Check for existence of fee funds.
    if quantity or (block_index >= 315000 or config.TESTNET):   # Protocol change.
        if not reissuance or (block_index < 310000 and not config.TESTNET):   # Pay fee only upon first issuance. (Protocol change.)
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM balances \
                              WHERE (address = ? AND asset = ?)''', (source, config.XCP))
            balances = cursor.fetchall()
            cursor.close()
            if util.enabled('numeric_asset_names'):   # Protocol change.
                if len(asset) >= 13:
                    fee = 0
                else:
                    fee = int(0.5 * config.UNIT)
            elif block_index >= 291700 or config.TESTNET:   # Protocol change.
                fee = int(0.5 * config.UNIT)
            elif block_index >= 286000 or config.TESTNET:   # Protocol change.
                fee = 5 * config.UNIT
            elif block_index > 281236 or config.TESTNET:    # Protocol change.
                fee = 5
            if fee and (not balances or balances[0]['quantity'] < fee):
                problems.append('insufficient funds')

    if not (block_index >= 317500 or config.TESTNET):   # Protocol change.
        if len(description) > 42:
            problems.append('description too long')

    # For SQLite3
    call_date = min(call_date, config.MAX_INT)
    total = sum([issuance['quantity'] for issuance in issuances])
    assert isinstance(quantity, int)
    if total + quantity > config.MAX_INT:
        problems.append('total quantity overflow')

    if destination and quantity:
        problems.append('cannot issue and transfer simultaneously')

    # For SQLite3
    if util.enabled('integer_overflow_fix', block_index=block_index) and (fee > config.MAX_INT or quantity > config.MAX_INT):
        problems.append('integer overflow')

    return call_date, call_price, problems, fee, description, divisible, reissuance
def parse (db, tx, message):
    issuance_parse_cursor = db.cursor()

    # Unpack message.
    try:
        if (tx['block_index'] > 283271 or config.TESTNET) and len(message) >= LENGTH_2:   # Protocol change.
            if len(message) - LENGTH_2 <= 42:
                curr_format = FORMAT_2 + '{}p'.format(len(message) - LENGTH_2)
            else:
                curr_format = FORMAT_2 + '{}s'.format(len(message) - LENGTH_2)
            asset_id, quantity, divisible, callable_, call_date, call_price, description = struct.unpack(curr_format, message)
            call_price = round(call_price, 6)   # TODO: arbitrary
            try:
                description = description.decode('utf-8')
            except UnicodeDecodeError:
                description = ''
        else:
            if len(message) != LENGTH_1:
                raise exceptions.UnpackError
            asset_id, quantity, divisible = struct.unpack(FORMAT_1, message)
            callable_, call_date, call_price, description = False, 0, 0.0, ''

        try:
            asset = util.generate_asset_name(asset_id, tx['block_index'])
            status = 'valid'
        except exceptions.AssetNameError:
            asset = None
            status = 'invalid: bad asset name'
    except exceptions.UnpackError as e:
        asset, quantity, divisible, callable_, call_date, call_price, description = None, None, None, None, None, None, None
        status = 'invalid: could not unpack'

    fee = 0
    if status == 'valid':
        call_date, call_price, problems, fee, description, divisible, reissuance = validate(db, tx['source'], tx['destination'], asset, quantity, divisible, callable_, call_date, call_price, description, block_index=tx['block_index'])
        if problems:
            status = 'invalid: ' + '; '.join(problems)
        if not util.enabled('integer_overflow_fix', block_index=tx['block_index']) and 'total quantity overflow' in problems:
            quantity = 0

    if tx['destination']:
        issuer = tx['destination']
        transfer = True
        quantity = 0
    else:
        issuer = tx['source']
        transfer = False

    # Debit fee.
    if status == 'valid':
        util.debit(db, tx['source'], config.XCP, fee, action="issuance fee", event=tx['tx_hash'])

    # Lock?
    lock = False
    if status == 'valid':
        if description and description.lower() == 'lock':
            lock = True
            cursor = db.cursor()
            issuances = list(cursor.execute('''SELECT * FROM issuances \
                                               WHERE (status = ? AND asset = ?)
                                               ORDER BY tx_index ASC''', ('valid', asset)))
            cursor.close()
            description = issuances[-1]['description']   # Use last description. (Assume previous issuance exists because tx is valid.)
            timestamp, value_int, fee_fraction_int = None, None, None

        if not reissuance:
            # Add to table of assets.
            bindings = {
                'asset_id': str(asset_id),
                'asset_name': str(asset),
                'block_index': tx['block_index'],
            }
            sql = 'insert into assets values(:asset_id, :asset_name, :block_index)'
            issuance_parse_cursor.execute(sql, bindings)

    # Add parsed transaction to message-type–specific table.
    bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'asset': asset,
        'quantity': quantity,
        'divisible': divisible,
        'source': tx['source'],
        'issuer': issuer,
        'transfer': transfer,
        'callable': callable_,
        'call_date': call_date,
        'call_price': call_price,
        'description': description,
        'fee_paid': fee,
        'locked': lock,
        'status': status,
    }
    if "integer overflow" not in status:
        sql = 'insert into issuances values(:tx_index, :tx_hash, :block_index, :asset, :quantity, :divisible, :source, :issuer, :transfer, :callable, :call_date, :call_price, :description, :fee_paid, :locked, :status)'
        issuance_parse_cursor.execute(sql, bindings)
    else:
        logger.warn("Not storing [issuance] tx [%s]: %s" % (tx['tx_hash'], status))
        logger.debug("Bindings: %s" % (json.dumps(bindings), ))

    # Credit.
    if status == 'valid' and quantity:
        util.credit(db, tx['source'], asset, quantity, action="issuance", event=tx['tx_hash'])

    issuance_parse_cursor.close()
def validate(db, source, destination, asset, quantity, divisible, callable_, call_date, call_price, description, subasset_parent, subasset_longname, block_index): problems = [] fee = 0 if asset in (config.BTC, config.XCP): problems.append('cannot issue {} or {}'.format(config.BTC, config.XCP)) if call_date is None: call_date = 0 if call_price is None: call_price = 0.0 if description is None: description = "" if divisible is None: divisible = True if isinstance(call_price, int): call_price = float(call_price) #^ helps especially with calls from JS‐based clients, where parseFloat(15) returns 15 (not 15.0), which json takes as an int if not isinstance(quantity, int): problems.append('quantity must be in satoshis') return call_date, call_price, problems, fee, description, divisible, None, None if call_date and not isinstance(call_date, int): problems.append('call_date must be epoch integer') return call_date, call_price, problems, fee, description, divisible, None, None if call_price and not isinstance(call_price, float): problems.append('call_price must be a float') return call_date, call_price, problems, fee, description, divisible, None, None if quantity < 0: problems.append('negative quantity') if call_price < 0: problems.append('negative call price') if call_date < 0: problems.append('negative call date') # Callable, or not. if not callable_: if block_index >= 312500 or config.TESTNET: # Protocol change. call_date = 0 call_price = 0.0 elif block_index >= 310000: # Protocol change. if call_date: problems.append('call date for non‐callable asset') if call_price: problems.append('call price for non‐callable asset') # Valid re-issuance? cursor = db.cursor() cursor.execute( '''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index ASC''', ('valid', asset)) issuances = cursor.fetchall() cursor.close() reissued_asset_longname = None if issuances: reissuance = True last_issuance = issuances[-1] reissued_asset_longname = last_issuance['asset_longname'] issuance_locked = False if util.enabled('issuance_lock_fix'): for issuance in issuances: if issuance['locked']: issuance_locked = True break elif last_issuance['locked']: # before the issuance_lock_fix, only the last issuance was checked issuance_locked = True if last_issuance['issuer'] != source: problems.append('issued by another address') if bool(last_issuance['divisible']) != bool(divisible): problems.append('cannot change divisibility') if bool(last_issuance['callable']) != bool(callable_): problems.append('cannot change callability') if last_issuance['call_date'] > call_date and ( call_date != 0 or (block_index < 312500 and not config.TESTNET)): problems.append('cannot advance call date') if last_issuance['call_price'] > call_price: problems.append('cannot reduce call price') if issuance_locked and quantity: problems.append('locked asset and non‐zero quantity') else: reissuance = False if description.lower() == 'lock': problems.append('cannot lock a non‐existent asset') if destination: problems.append('cannot transfer a non‐existent asset') # validate parent ownership for subasset if subasset_longname is not None: cursor = db.cursor() cursor.execute( '''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) 
ORDER BY tx_index ASC''', ('valid', subasset_parent)) parent_issuances = cursor.fetchall() cursor.close() if parent_issuances: last_parent_issuance = parent_issuances[-1] if last_parent_issuance['issuer'] != source: problems.append('parent asset owned by another address') else: problems.append('parent asset not found') # validate subasset issuance is not a duplicate if subasset_longname is not None and not reissuance: cursor = db.cursor() cursor.execute( '''SELECT * FROM assets \ WHERE (asset_longname = ?)''', (subasset_longname, )) assets = cursor.fetchall() if len(assets) > 0: problems.append('subasset already exists') # validate that the actual asset is numeric if asset[0] != 'A': problems.append('a subasset must be a numeric asset') # Check for existence of fee funds. if quantity or (block_index >= 315000 or config.TESTNET): # Protocol change. if not reissuance or ( block_index < 310000 and not config.TESTNET ): # Pay fee only upon first issuance. (Protocol change.) cursor = db.cursor() cursor.execute( '''SELECT * FROM balances \ WHERE (address = ? AND asset = ?)''', (source, config.XCP)) balances = cursor.fetchall() cursor.close() if util.enabled('numeric_asset_names'): # Protocol change. if subasset_longname is not None and util.enabled( 'subassets'): # Protocol change. # subasset issuance is 0.25 fee = int(0.25 * config.UNIT) elif len(asset) >= 13: fee = 0 else: fee = int(0.5 * config.UNIT) elif block_index >= 291700 or config.TESTNET: # Protocol change. fee = int(0.5 * config.UNIT) elif block_index >= 286000 or config.TESTNET: # Protocol change. fee = 5 * config.UNIT elif block_index > 281236 or config.TESTNET: # Protocol change. fee = 5 if fee and (not balances or balances[0]['quantity'] < fee): problems.append('insufficient funds') if not (block_index >= 317500 or config.TESTNET): # Protocol change. if len(description) > 42: problems.append('description too long') # For SQLite3 call_date = min(call_date, config.MAX_INT) total = sum([issuance['quantity'] for issuance in issuances]) assert isinstance(quantity, int) if total + quantity > config.MAX_INT: problems.append('total quantity overflow') if destination and quantity: problems.append('cannot issue and transfer simultaneously') # For SQLite3 if util.enabled('integer_overflow_fix', block_index=block_index) and ( fee > config.MAX_INT or quantity > config.MAX_INT): problems.append('integer overflow') return call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname
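# Illustration (not from the library): the fee schedule branch of validate() above, restated as a
# standalone sketch for the post-'numeric_asset_names' era only. It assumes config.UNIT == 100000000
# and ignores the older 291700 / 286000 / 281236 protocol eras; the helper name is hypothetical.
UNIT = 100000000  # assumed value of config.UNIT (satoshis per XCP)

def issuance_fee_sketch(asset, subasset_longname=None):
    """Fee charged on first issuance, per the branch in validate() above."""
    if subasset_longname is not None:
        return int(0.25 * UNIT)   # subasset issuance costs 0.25 XCP
    if len(asset) >= 13:
        return 0                  # numeric assets ('A...' names, 13+ characters) are free
    return int(0.5 * UNIT)        # named assets cost 0.5 XCP

assert issuance_fee_sketch('A95428956661682177') == 0
assert issuance_fee_sketch('PIZZA') == 50000000
assert issuance_fee_sketch('A95428956661682177', subasset_longname='PIZZA.DELIVERY') == 25000000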
def parse(db, tx, message, message_type_id): issuance_parse_cursor = db.cursor() # Unpack message. try: subasset_longname = None if message_type_id == SUBASSET_ID: if not util.enabled('subassets', block_index=tx['block_index']): logger.warn("subassets are not enabled at block %s" % tx['block_index']) raise exceptions.UnpackError # parse a subasset original issuance message asset_id, quantity, divisible, compacted_subasset_length = struct.unpack( SUBASSET_FORMAT, message[0:SUBASSET_FORMAT_LENGTH]) description_length = len( message) - SUBASSET_FORMAT_LENGTH - compacted_subasset_length if description_length < 0: logger.warn("invalid subasset length: [issuance] tx [%s]: %s" % (tx['tx_hash'], compacted_subasset_length)) raise exceptions.UnpackError messages_format = '>{}s{}s'.format(compacted_subasset_length, description_length) compacted_subasset_longname, description = struct.unpack( messages_format, message[SUBASSET_FORMAT_LENGTH:]) subasset_longname = util.expand_subasset_longname( compacted_subasset_longname) callable_, call_date, call_price = False, 0, 0.0 try: description = description.decode('utf-8') except UnicodeDecodeError: description = '' elif (tx['block_index'] > 283271 or config.TESTNET) and len(message) >= LENGTH_2: # Protocol change. if len(message) - LENGTH_2 <= 42: curr_format = FORMAT_2 + '{}p'.format(len(message) - LENGTH_2) else: curr_format = FORMAT_2 + '{}s'.format(len(message) - LENGTH_2) asset_id, quantity, divisible, callable_, call_date, call_price, description = struct.unpack( curr_format, message) call_price = round(call_price, 6) # TODO: arbitrary try: description = description.decode('utf-8') except UnicodeDecodeError: description = '' else: if len(message) != LENGTH_1: raise exceptions.UnpackError asset_id, quantity, divisible = struct.unpack(FORMAT_1, message) callable_, call_date, call_price, description = False, 0, 0.0, '' try: asset = util.generate_asset_name(asset_id, tx['block_index']) status = 'valid' except exceptions.AssetNameError: asset = None status = 'invalid: bad asset name' except exceptions.UnpackError as e: asset, quantity, divisible, callable_, call_date, call_price, description = None, None, None, None, None, None, None status = 'invalid: could not unpack' # parse and validate the subasset from the message subasset_parent = None if status == 'valid' and subasset_longname is not None: # Protocol change. try: # ensure the subasset_longname is valid util.validate_subasset_longname(subasset_longname) subasset_parent, subasset_longname = util.parse_subasset_from_asset_name( subasset_longname) except exceptions.AssetNameError as e: asset = None status = 'invalid: bad subasset name' reissuance = None fee = 0 if status == 'valid': call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname = validate( db, tx['source'], tx['destination'], asset, quantity, divisible, callable_, call_date, call_price, description, subasset_parent, subasset_longname, block_index=tx['block_index']) if problems: status = 'invalid: ' + '; '.join(problems) if not util.enabled('integer_overflow_fix', block_index=tx['block_index'] ) and 'total quantity overflow' in problems: quantity = 0 if tx['destination']: issuer = tx['destination'] transfer = True quantity = 0 else: issuer = tx['source'] transfer = False # Debit fee. if status == 'valid': util.debit(db, tx['source'], config.XCP, fee, action="issuance fee", event=tx['tx_hash']) # Lock?
lock = False if status == 'valid': if description and description.lower() == 'lock': lock = True cursor = db.cursor() issuances = list( cursor.execute( '''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index ASC''', ('valid', asset))) cursor.close() description = issuances[-1][ 'description'] # Use last description. (Assume previous issuance exists because tx is valid.) timestamp, value_int, fee_fraction_int = None, None, None if not reissuance: # Add to table of assets. bindings = { 'asset_id': str(asset_id), 'asset_name': str(asset), 'block_index': tx['block_index'], 'asset_longname': subasset_longname, } sql = 'insert into assets values(:asset_id, :asset_name, :block_index, :asset_longname)' issuance_parse_cursor.execute(sql, bindings) if status == 'valid' and reissuance: # when reissuing, add the asset_longname to the issuances table for API lookups asset_longname = reissued_asset_longname else: asset_longname = subasset_longname # Add parsed transaction to message-type–specific table. bindings = { 'tx_index': tx['tx_index'], 'tx_hash': tx['tx_hash'], 'block_index': tx['block_index'], 'asset': asset, 'quantity': quantity, 'divisible': divisible, 'source': tx['source'], 'issuer': issuer, 'transfer': transfer, 'callable': callable_, 'call_date': call_date, 'call_price': call_price, 'description': description, 'fee_paid': fee, 'locked': lock, 'status': status, 'asset_longname': asset_longname, } if "integer overflow" not in status: sql = 'insert into issuances values(:tx_index, :tx_hash, :block_index, :asset, :quantity, :divisible, :source, :issuer, :transfer, :callable, :call_date, :call_price, :description, :fee_paid, :locked, :status, :asset_longname)' issuance_parse_cursor.execute(sql, bindings) else: logger.warn("Not storing [issuance] tx [%s]: %s" % (tx['tx_hash'], status)) logger.debug("Bindings: %s" % (json.dumps(bindings), )) # Credit. if status == 'valid' and quantity: util.credit(db, tx['source'], asset, quantity, action="issuance", event=tx['tx_hash']) issuance_parse_cursor.close()
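# Illustration (not from the library): a toy round-trip of the type-21 subasset payload that the
# unpack branch above expects. SUBASSET_FORMAT is assumed to be '>QQ?B' (as the compose() comment
# below states); the real compacted-longname codec lives in util and is not reproduced here.
import struct

SUBASSET_FORMAT = '>QQ?B'                                   # asset_id, quantity, divisible, name length
SUBASSET_FORMAT_LENGTH = struct.calcsize(SUBASSET_FORMAT)   # 18 bytes

compacted_name = b'\x01\x02\x03'                            # stand-in for util.compact_subasset_longname(...)
message = struct.pack(SUBASSET_FORMAT, 26**12 + 1, 1000, True, len(compacted_name)) + compacted_name + b'hello'

asset_id, quantity, divisible, name_length = struct.unpack(SUBASSET_FORMAT, message[:SUBASSET_FORMAT_LENGTH])
description_length = len(message) - SUBASSET_FORMAT_LENGTH - name_length
name_bytes, description = struct.unpack('>{}s{}s'.format(name_length, description_length),
                                        message[SUBASSET_FORMAT_LENGTH:])
assert (quantity, divisible, description) == (1000, True, b'hello')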
def compose(db, source, transfer_destination, asset, quantity, divisible, description): # Callability is deprecated, so for re‐issuances set relevant parameters # to old values; for first issuances, make uncallable. cursor = db.cursor() cursor.execute( '''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index ASC''', ('valid', asset)) issuances = cursor.fetchall() if issuances: last_issuance = issuances[-1] callable_ = last_issuance['callable'] call_date = last_issuance['call_date'] call_price = last_issuance['call_price'] else: callable_ = False call_date = 0 call_price = 0.0 cursor.close() # check subasset subasset_parent = None subasset_longname = None if util.enabled('subassets'): # Protocol change. subasset_parent, subasset_longname = util.parse_subasset_from_asset_name( asset) if subasset_longname is not None: # try to find an existing subasset sa_cursor = db.cursor() sa_cursor.execute( '''SELECT * FROM assets \ WHERE (asset_longname = ?)''', (subasset_longname, )) assets = sa_cursor.fetchall() sa_cursor.close() if len(assets) > 0: # this is a reissuance asset = assets[0]['asset_name'] else: # this is a new issuance # generate a random numeric asset id which will map to this subasset asset = util.generate_random_asset() call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname = validate( db, source, transfer_destination, asset, quantity, divisible, callable_, call_date, call_price, description, subasset_parent, subasset_longname, util.CURRENT_BLOCK_INDEX) if problems: raise exceptions.ComposeError(problems) asset_id = util.generate_asset_id(asset, util.CURRENT_BLOCK_INDEX) if subasset_longname is None or reissuance: # Type 20 standard issuance FORMAT_2 >QQ??If # used for standard issuances and all reissuances data = message_type.pack(ID) if len(description) <= 42: curr_format = FORMAT_2 + '{}p'.format(len(description) + 1) else: curr_format = FORMAT_2 + '{}s'.format(len(description)) data += struct.pack(curr_format, asset_id, quantity, 1 if divisible else 0, 1 if callable_ else 0, call_date or 0, call_price or 0.0, description.encode('utf-8')) else: # Type 21 subasset issuance SUBASSET_FORMAT >QQ?B # Used only for initial subasset issuance # compacts a subasset name to save space compacted_subasset_longname = util.compact_subasset_longname( subasset_longname) compacted_subasset_length = len(compacted_subasset_longname) data = message_type.pack(SUBASSET_ID) curr_format = SUBASSET_FORMAT + '{}s'.format( compacted_subasset_length) + '{}s'.format(len(description)) data += struct.pack(curr_format, asset_id, quantity, 1 if divisible else 0, compacted_subasset_length, compacted_subasset_longname, description.encode('utf-8')) if transfer_destination: destination_outputs = [(transfer_destination, None)] else: destination_outputs = [] return (source, destination_outputs, data)
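# Illustration (not from the library): the Pascal-string ('p') versus raw-bytes ('s') choice compose()
# makes above for the description field. FORMAT_2 is assumed to be '>QQ??If', as the comment in
# compose() says; 'p' prepends a one-byte length, which is why compose() passes len(description) + 1.
import struct

FORMAT_2 = '>QQ??If'
description = 'hello'.encode('utf-8')

short_form = struct.pack(FORMAT_2 + '{}p'.format(len(description) + 1), 17, 1000, True, False, 0, 0.0, description)
long_form = struct.pack(FORMAT_2 + '{}s'.format(len(description)), 17, 1000, True, False, 0, 0.0, description)

assert len(short_form) == len(long_form) + 1   # the 'p' form spends one extra byte on the length prefix
assert short_form[-6:] == b'\x05hello'         # length byte, then the description bytes
assert long_form[-5:] == b'hello'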
def get_tx_info2(tx_hex, block_parser=None): """Get multisig transaction info. The destinations, if they exists, always comes before the data output; the change, if it exists, always comes after. """ # Decode transaction binary. ctx = backend.deserialize(tx_hex) def arc4_decrypt(cyphertext): '''Un‐obfuscate. Initialise key once per attempt.''' key = ARC4.new(ctx.vin[0].prevout.hash[::-1]) return key.decrypt(cyphertext) def get_opreturn(asm): if len(asm) == 2 and asm[0] == 'OP_RETURN': pubkeyhash = asm[1] if type(pubkeyhash) == bytes: return pubkeyhash raise DecodeError('invalid OP_RETURN') def decode_opreturn(asm): chunk = get_opreturn(asm) chunk = arc4_decrypt(chunk) if chunk[:len(config.PREFIX)] == config.PREFIX: # Data destination, data = None, chunk[len(config.PREFIX):] else: raise DecodeError('unrecognised OP_RETURN output') return destination, data def decode_checksig(asm): pubkeyhash = script.get_checksig(asm) chunk = arc4_decrypt(pubkeyhash) if chunk[1:len(config.PREFIX) + 1] == config.PREFIX: # Data # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data. chunk_length = chunk[0] chunk = chunk[1:chunk_length + 1] destination, data = None, chunk[len(config.PREFIX):] else: # Destination pubkeyhash = binascii.hexlify(pubkeyhash).decode('utf-8') destination, data = script.base58_check_encode(pubkeyhash, config.ADDRESSVERSION), None return destination, data def decode_checkmultisig(asm): pubkeys, signatures_required = script.get_checkmultisig(asm) chunk = b'' for pubkey in pubkeys[:-1]: # (No data in last pubkey.) chunk += pubkey[1:-1] # Skip sign byte and nonce byte. chunk = arc4_decrypt(chunk) if chunk[1:len(config.PREFIX) + 1] == config.PREFIX: # Data # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data. chunk_length = chunk[0] chunk = chunk[1:chunk_length + 1] destination, data = None, chunk[len(config.PREFIX):] else: # Destination pubkeyhashes = [script.pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys] destination, data = script.construct_array(signatures_required, pubkeyhashes, len(pubkeyhashes)), None return destination, data # Ignore coinbase transactions. if ctx.is_coinbase(): raise DecodeError('coinbase transaction') # Get destinations and data outputs. destinations, btc_amount, fee, data = [], 0, 0, b'' for vout in ctx.vout: # Fee is the input values minus output values. output_value = vout.nValue fee -= output_value # Ignore transactions with invalid script. try: asm = script.get_asm(vout.scriptPubKey) except CScriptInvalidError as e: raise DecodeError(e) if asm[0] == 'OP_RETURN': new_destination, new_data = decode_opreturn(asm) elif asm[-1] == 'OP_CHECKSIG': new_destination, new_data = decode_checksig(asm) elif asm[-1] == 'OP_CHECKMULTISIG': new_destination, new_data = decode_checkmultisig(asm) else: raise DecodeError('unrecognised output type') assert not (new_destination and new_data) assert new_destination != None or new_data != None # `decode_*()` should never return `None, None`. if util.enabled('null_data_check'): if new_data == []: raise DecodeError('new destination is `None`') # All destinations come before all data. if not data and not new_data and destinations != [config.UNSPENDABLE,]: destinations.append(new_destination) btc_amount += output_value else: if new_destination: # Change. break else: # Data. 
data += new_data # Only look for source if data were found or destination is `UNSPENDABLE`, # for speed. if not data and destinations != [config.UNSPENDABLE,]: raise BTCOnlyError('no data and not unspendable') # Collect all (unique) source addresses. sources = [] for vin in ctx.vin[:]: # Loop through inputs. # Get the full transaction data for this input transaction. if block_parser: vin_tx = block_parser.read_raw_transaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx['__data__']) else: vin_tx = backend.getrawtransaction(ib2h(vin.prevout.hash)) vin_ctx = backend.deserialize(vin_tx) vout = vin_ctx.vout[vin.prevout.n] fee += vout.nValue asm = script.get_asm(vout.scriptPubKey) if asm[-1] == 'OP_CHECKSIG': new_source, new_data = decode_checksig(asm) if new_data or not new_source: raise DecodeError('data in source') elif asm[-1] == 'OP_CHECKMULTISIG': new_source, new_data = decode_checkmultisig(asm) if new_data or not new_source: raise DecodeError('data in source') else: raise DecodeError('unrecognised source type') # Collect unique sources. if new_source not in sources: sources.append(new_source) sources = '-'.join(sources) destinations = '-'.join(destinations) return sources, destinations, btc_amount, round(fee), data
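# Illustration (not from the library): the RC4 obfuscation undone by arc4_decrypt() above is symmetric
# and keyed on the byte-reversed txid of the first input. PREFIX and the txid below are made up; the
# real values come from config.PREFIX and ctx.vin[0].prevout.hash[::-1].
import binascii
from Crypto.Cipher import ARC4

PREFIX = b'CNTRPRTY'                                    # assumed config.PREFIX
txid = binascii.unhexlify('aa' * 32)                    # made-up first-input txid

plaintext = PREFIX + b'example counterparty payload'
obfuscated = ARC4.new(txid[::-1]).encrypt(plaintext)    # what ends up in the output script

chunk = ARC4.new(txid[::-1]).decrypt(obfuscated)        # what arc4_decrypt() recovers
assert chunk[:len(PREFIX)] == PREFIX                    # same prefix check as decode_opreturn()
data = chunk[len(PREFIX):]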
def parse (db, tx, message): cursor = db.cursor() # Unpack message. try: if len(message) - LENGTH <= 52: curr_format = FORMAT + '{}p'.format(len(message) - LENGTH) else: curr_format = FORMAT + '{}s'.format(len(message) - LENGTH) timestamp, value, fee_fraction_int, text = struct.unpack(curr_format, message) try: text = text.decode('utf-8') except UnicodeDecodeError: text = '' status = 'valid' except (struct.error) as e: timestamp, value, fee_fraction_int, text = 0, None, 0, None status = 'invalid: could not unpack' if status == 'valid': # For SQLite3 timestamp = min(timestamp, config.MAX_INT) value = min(value, config.MAX_INT) problems = validate(db, tx['source'], timestamp, value, fee_fraction_int, text, tx['block_index']) if problems: status = 'invalid: ' + '; '.join(problems) # Lock? lock = False if text and text.lower() == 'lock': lock = True timestamp, value, fee_fraction_int, text = 0, None, None, None else: lock = False # Add parsed transaction to message-type–specific table. bindings = { 'tx_index': tx['tx_index'], 'tx_hash': tx['tx_hash'], 'block_index': tx['block_index'], 'source': tx['source'], 'timestamp': timestamp, 'value': value, 'fee_fraction_int': fee_fraction_int, 'text': text, 'locked': lock, 'status': status, } sql='insert into broadcasts values(:tx_index, :tx_hash, :block_index, :source, :timestamp, :value, :fee_fraction_int, :text, :locked, :status)' cursor.execute(sql, bindings) # Negative values (default to ignore). if value == None or value < 0: # Cancel Open Bets? if value == -2: cursor.execute('''SELECT * FROM bets \ WHERE (status = ? AND feed_address = ?)''', ('open', tx['source'])) for i in list(cursor): bet.cancel_bet(db, i, 'dropped', tx['block_index']) # Cancel Pending Bet Matches? if value == -3: cursor.execute('''SELECT * FROM bet_matches \ WHERE (status = ? AND feed_address = ?)''', ('pending', tx['source'])) for bet_match in list(cursor): bet.cancel_bet_match(db, bet_match, 'dropped', tx['block_index']) cursor.close() return # Handle bet matches that use this feed. cursor.execute('''SELECT * FROM bet_matches \ WHERE (status=? AND feed_address=?) ORDER BY tx1_index ASC, tx0_index ASC''', ('pending', tx['source'])) for bet_match in cursor.fetchall(): if util.enabled('max_fee_fraction'): if status != 'valid': break broadcast_bet_match_cursor = db.cursor() bet_match_id = util.make_id(bet_match['tx0_hash'], bet_match['tx1_hash']) bet_match_status = None # Calculate total funds held in escrow and total fee to be paid if # the bet match is settled. Escrow less fee is amount to be paid back # to betters. total_escrow = bet_match['forward_quantity'] + bet_match['backward_quantity'] fee_fraction = fee_fraction_int / config.UNIT fee = int(fee_fraction * total_escrow) # Truncate. escrow_less_fee = total_escrow - fee # Get known bet match type IDs. cfd_type_id = util.BET_TYPE_ID['BullCFD'] + util.BET_TYPE_ID['BearCFD'] equal_type_id = util.BET_TYPE_ID['Equal'] + util.BET_TYPE_ID['NotEqual'] # Get the bet match type ID of this bet match. bet_match_type_id = bet_match['tx0_bet_type'] + bet_match['tx1_bet_type'] # Contract for difference, with determinate settlement date. if bet_match_type_id == cfd_type_id: # Recognise tx0, tx1 as the bull, bear (in the right direction). 
if bet_match['tx0_bet_type'] < bet_match['tx1_bet_type']: bull_address = bet_match['tx0_address'] bear_address = bet_match['tx1_address'] bull_escrow = bet_match['forward_quantity'] bear_escrow = bet_match['backward_quantity'] else: bull_address = bet_match['tx1_address'] bear_address = bet_match['tx0_address'] bull_escrow = bet_match['backward_quantity'] bear_escrow = bet_match['forward_quantity'] leverage = Fraction(bet_match['leverage'], 5040) initial_value = bet_match['initial_value'] bear_credit = bear_escrow - (value - initial_value) * leverage * config.UNIT bull_credit = escrow_less_fee - bear_credit bear_credit = round(bear_credit) bull_credit = round(bull_credit) # Liquidate, as necessary. if bull_credit >= escrow_less_fee or bull_credit <= 0: if bull_credit >= escrow_less_fee: bull_credit = escrow_less_fee bear_credit = 0 bet_match_status = 'settled: liquidated for bull' util.credit(db, bull_address, config.XCP, bull_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) elif bull_credit <= 0: bull_credit = 0 bear_credit = escrow_less_fee bet_match_status = 'settled: liquidated for bear' util.credit(db, bear_address, config.XCP, bear_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) # Pay fee to feed. util.credit(db, bet_match['feed_address'], config.XCP, fee, action='feed fee', event=tx['tx_hash']) # For logging purposes. bindings = { 'bet_match_id': bet_match_id, 'bet_match_type_id': bet_match_type_id, 'block_index': tx['block_index'], 'settled': False, 'bull_credit': bull_credit, 'bear_credit': bear_credit, 'winner': None, 'escrow_less_fee': None, 'fee': fee } sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)' cursor.execute(sql, bindings) # Settle (if not liquidated). elif timestamp >= bet_match['deadline']: bet_match_status = 'settled' util.credit(db, bull_address, config.XCP, bull_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) util.credit(db, bear_address, config.XCP, bear_credit, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) # Pay fee to feed. util.credit(db, bet_match['feed_address'], config.XCP, fee, action='feed fee', event=tx['tx_hash']) # For logging purposes. bindings = { 'bet_match_id': bet_match_id, 'bet_match_type_id': bet_match_type_id, 'block_index': tx['block_index'], 'settled': True, 'bull_credit': bull_credit, 'bear_credit': bear_credit, 'winner': None, 'escrow_less_fee': None, 'fee': fee } sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)' cursor.execute(sql, bindings) # Equal[/NotEqual] bet. elif bet_match_type_id == equal_type_id and timestamp >= bet_match['deadline']: # Recognise tx0, tx1 as the bull, bear (in the right direction). if bet_match['tx0_bet_type'] < bet_match['tx1_bet_type']: equal_address = bet_match['tx0_address'] notequal_address = bet_match['tx1_address'] else: equal_address = bet_match['tx1_address'] notequal_address = bet_match['tx0_address'] # Decide who won, and credit appropriately. 
if value == bet_match['target_value']: winner = 'Equal' bet_match_status = 'settled: for equal' util.credit(db, equal_address, config.XCP, escrow_less_fee, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) else: winner = 'NotEqual' bet_match_status = 'settled: for notequal' util.credit(db, notequal_address, config.XCP, escrow_less_fee, action='bet {}'.format(bet_match_status), event=tx['tx_hash']) # Pay fee to feed. util.credit(db, bet_match['feed_address'], config.XCP, fee, action='feed fee', event=tx['tx_hash']) # For logging purposes. bindings = { 'bet_match_id': bet_match_id, 'bet_match_type_id': bet_match_type_id, 'block_index': tx['block_index'], 'settled': None, 'bull_credit': None, 'bear_credit': None, 'winner': winner, 'escrow_less_fee': escrow_less_fee, 'fee': fee } sql='insert into bet_match_resolutions values(:bet_match_id, :bet_match_type_id, :block_index, :settled, :bull_credit, :bear_credit, :winner, :escrow_less_fee, :fee)' cursor.execute(sql, bindings) # Update the bet match’s status. if bet_match_status: bindings = { 'status': bet_match_status, 'bet_match_id': util.make_id(bet_match['tx0_hash'], bet_match['tx1_hash']) } sql='update bet_matches set status = :status where id = :bet_match_id' cursor.execute(sql, bindings) log.message(db, tx['block_index'], 'update', 'bet_matches', bindings) broadcast_bet_match_cursor.close() cursor.close()
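# Worked example (not from the library): the CFD settlement arithmetic used above, with made-up numbers
# and config.UNIT assumed to be 100000000. The feed moves from 100 to 102 at 1x leverage, so two points'
# worth of escrow shifts from the bear to the bull once the feed fee has been deducted.
from fractions import Fraction

UNIT = 100000000
forward_quantity, backward_quantity = 150000000, 350000000   # bull escrow, bear escrow
fee_fraction_int = 5000000                                   # 5% feed fee
initial_value, value = 100, 102
leverage = Fraction(5040, 5040)                              # 1x

total_escrow = forward_quantity + backward_quantity          # 500000000
fee = int(fee_fraction_int / UNIT * total_escrow)            # 25000000
escrow_less_fee = total_escrow - fee                         # 475000000

bear_credit = round(backward_quantity - (value - initial_value) * leverage * UNIT)
bull_credit = escrow_less_fee - bear_credit
assert (bull_credit, bear_credit) == (325000000, 150000000)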
def test_bytespersigop(server_db): assert util.enabled('bytespersigop') == False # ADDR[0], bytespersigop=False, desc 41 bytes, opreturn txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[0], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 41}, ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 1 assert len(tx.vout) == 2 assert "OP_RETURN" in repr(tx.vout[0].scriptPubKey) # ADDR[0], bytespersigop=False, desc 42 bytes, multisig txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[0], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 42}, ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 1 assert len(tx.vout) == 3 assert "OP_CHECKMULTISIG" in repr(tx.vout[0].scriptPubKey) assert "OP_CHECKMULTISIG" in repr(tx.vout[1].scriptPubKey) # enable byterpersigop with util_test.MockProtocolChangesContext(bytespersigop=True): assert util.enabled('bytespersigop') == True # ADDR[0], bytespersigop=True, desc 41 bytes, opreturn txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[0], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 41}, ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 1 assert len(tx.vout) == 2 assert "OP_RETURN" in repr(tx.vout[0].scriptPubKey) # ADDR[0], bytespersigop=True, desc 42 bytes, pubkeyhash encoding # pubkeyhash because ADDR[0] only has 1 UTXO to spend from txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[0], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 42}, ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 1 assert len(tx.vout) == 8 for i in range(7): assert "OP_CHECKSIG" in repr(tx.vout[i].scriptPubKey) # ADDR[0], bytespersigop=True, desc 20 bytes, FORCED multisig encoding # will error because it's not possible, ADDR[0] only has 1 UTXO with pytest.raises(exceptions.EncodingError): txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[0], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 20}, encoding='multisig' ) # ADDR[1], bytespersigop=True, desc 41 bytes, opreturn encoding txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[1], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 41}, ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 1 assert len(tx.vout) == 2 assert "OP_RETURN" in repr(tx.vout[0].scriptPubKey) # ADDR[1], bytespersigop=True, desc 20 bytes, FORCED encoding=multisig # will use 2 UTXOs to make the bytes:sigop ratio in our favor txhex = api.compose_transaction( server_db, 'issuance', {'source': ADDR[1], 'asset': 'TESTING', 'quantity': 100, 'transfer_destination': None, 'divisible': False, 'description': 't' * 20}, encoding='multisig' ) tx = bitcoinlib.core.CTransaction.deserialize(binascii.unhexlify(txhex)) assert len(tx.vin) == 2 assert len(tx.vout) == 2 assert "OP_CHECKMULTISIG" in repr(tx.vout[0].scriptPubKey)
def parse (db, tx, message): cursor = db.cursor() # Unpack message. try: if len(message) != LENGTH: raise exceptions.UnpackError tx0_hash_bytes, tx1_hash_bytes = struct.unpack(FORMAT, message) tx0_hash, tx1_hash = binascii.hexlify(tx0_hash_bytes).decode('utf-8'), binascii.hexlify(tx1_hash_bytes).decode('utf-8') order_match_id = util.make_id(tx0_hash, tx1_hash) status = 'valid' except (exceptions.UnpackError, struct.error) as e: tx0_hash, tx1_hash, order_match_id = None, None, None status = 'invalid: could not unpack' if status == 'valid': destination, btc_quantity, escrowed_asset, escrowed_quantity, order_match, problems = validate(db, tx['source'], order_match_id, tx['block_index']) if problems: order_match = None status = 'invalid: ' + '; '.join(problems) if status == 'valid': # BTC must be paid all at once. if tx['btc_amount'] >= btc_quantity: # Credit source address for the currency that he bought with the bitcoins. util.credit(db, tx['source'], escrowed_asset, escrowed_quantity, action='btcpay', event=tx['tx_hash']) status = 'valid' # Update order match. bindings = { 'status': 'completed', 'order_match_id': order_match_id } sql='update order_matches set status = :status where id = :order_match_id' cursor.execute(sql, bindings) log.message(db, tx['block_index'], 'update', 'order_matches', bindings) # Update give and get order status as filled if order_match is completed if util.enabled('btc_order_filled'): bindings = { 'status': 'pending', 'tx0_hash': tx0_hash, 'tx1_hash': tx1_hash } sql='select * from order_matches where status = :status and ((tx0_hash in (:tx0_hash, :tx1_hash)) or ((tx1_hash in (:tx0_hash, :tx1_hash))))' cursor.execute(sql, bindings) order_matches = cursor.fetchall() if len(order_matches) == 0: # mark both btc get and give orders as filled when order_match is completed and give or get remaining = 0 bindings = { 'status': 'filled', 'tx0_hash': tx0_hash, 'tx1_hash': tx1_hash } sql='update orders set status = :status where ((tx_hash in (:tx0_hash, :tx1_hash)) and ((give_remaining = 0) or (get_remaining = 0)))' cursor.execute(sql, bindings) else: # always mark btc get order as filled when order_match is completed and give or get remaining = 0 bindings = { 'status': 'filled', 'source': tx['destination'], 'tx0_hash': tx0_hash, 'tx1_hash': tx1_hash } sql='update orders set status = :status where ((tx_hash in (:tx0_hash, :tx1_hash)) and ((give_remaining = 0) or (get_remaining = 0)) and (source = :source))' cursor.execute(sql, bindings) # Add parsed transaction to message-type–specific table. bindings = { 'tx_index': tx['tx_index'], 'tx_hash': tx['tx_hash'], 'block_index': tx['block_index'], 'source': tx['source'], 'destination': tx['destination'], 'btc_amount': tx['btc_amount'], 'order_match_id': order_match_id, 'status': status, } if "integer overflow" not in status: sql = 'insert into btcpays values(:tx_index, :tx_hash, :block_index, :source, :destination, :btc_amount, :order_match_id, :status)' cursor.execute(sql, bindings) else: logger.warn("Not storing [btcpay] tx [%s]: %s" % (tx['tx_hash'], status)) logger.debug("Bindings: %s" % (json.dumps(bindings), )) cursor.close()
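# Illustration (not from the library): a BTCPay payload is just the two order-match transaction hashes
# back to back. FORMAT and LENGTH are assumed to be '>32s32s' and 64 to mirror the unpack call above;
# the hashes below are made up.
import binascii
import struct

FORMAT, LENGTH = '>32s32s', 64

tx0 = bytes(range(32))                 # made-up order-match tx hashes
tx1 = bytes(range(32, 64))
message = struct.pack(FORMAT, tx0, tx1)
assert len(message) == LENGTH

tx0_hash_bytes, tx1_hash_bytes = struct.unpack(FORMAT, message)
tx0_hash = binascii.hexlify(tx0_hash_bytes).decode('utf-8')
tx1_hash = binascii.hexlify(tx1_hash_bytes).decode('utf-8')
# util.make_id(tx0_hash, tx1_hash) then pairs these into the id used to look up the order match.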
def construct (db, tx_info, encoding='auto', fee_per_kb=config.DEFAULT_FEE_PER_KB, estimate_fee_per_kb=None, estimate_fee_per_kb_nblocks=config.ESTIMATE_FEE_NBLOCKS, regular_dust_size=config.DEFAULT_REGULAR_DUST_SIZE, multisig_dust_size=config.DEFAULT_MULTISIG_DUST_SIZE, op_return_value=config.DEFAULT_OP_RETURN_VALUE, exact_fee=None, fee_provided=0, provided_pubkeys=None, dust_return_pubkey=None, allow_unconfirmed_inputs=False, unspent_tx_hash=None, custom_inputs=None, disable_utxo_locks=False): if estimate_fee_per_kb is None: estimate_fee_per_kb = config.ESTIMATE_FEE_PER_KB global UTXO_LOCKS desired_encoding = encoding (source, destination_outputs, data) = tx_info if dust_return_pubkey: dust_return_pubkey = binascii.unhexlify(dust_return_pubkey) # Source. # If public key is necessary for construction of (unsigned) # transaction, use the public key provided, or find it from the # blockchain. if source: script.validate(source) source_is_p2sh = script.is_p2sh(source) # Sanity checks. if exact_fee and not isinstance(exact_fee, int): raise exceptions.TransactionError('Exact fees must be in satoshis.') if not isinstance(fee_provided, int): raise exceptions.TransactionError('Fee provided must be in satoshis.') if UTXO_LOCKS is None and config.UTXO_LOCKS_MAX_ADDRESSES > 0: # initialize if configured UTXO_LOCKS = util.DictCache(size=config.UTXO_LOCKS_MAX_ADDRESSES) '''Destinations''' # Destination outputs. # Replace multi‐sig addresses with multi‐sig pubkeys. Check that the # destination output isn’t a dust output. Set null values to dust size. destination_outputs_new = [] for (address, value) in destination_outputs: # Value. if script.is_multisig(address): dust_size = multisig_dust_size else: dust_size = regular_dust_size if value == None: value = dust_size elif value < dust_size: raise exceptions.TransactionError('Destination output is dust.') # Address. script.validate(address) if script.is_multisig(address): destination_outputs_new.append((backend.multisig_pubkeyhashes_to_pubkeys(address, provided_pubkeys), value)) else: destination_outputs_new.append((address, value)) destination_outputs = destination_outputs_new destination_btc_out = sum([value for address, value in destination_outputs]) '''Data''' if data: # Data encoding methods (choose and validate). if encoding == 'auto': if len(data) + len(config.PREFIX) <= config.OP_RETURN_MAX_SIZE: encoding = 'opreturn' else: encoding = 'multisig' elif encoding not in ('pubkeyhash', 'multisig', 'opreturn'): raise exceptions.TransactionError('Unknown encoding‐scheme.') if encoding == 'multisig': # dust_return_pubkey should be set or explicitly set to False to use the default configured for the node # the default for the node is optional so could fail if (source_is_p2sh and dust_return_pubkey is None) or (dust_return_pubkey is False and config.P2SH_DUST_RETURN_PUBKEY is None): raise exceptions.TransactionError("Can't use multisig encoding when source is P2SH and no dust_return_pubkey is provided.") elif dust_return_pubkey is False: dust_return_pubkey = binascii.unhexlify(config.P2SH_DUST_RETURN_PUBKEY) # Divide data into chunks. if encoding == 'pubkeyhash': # Prefix is also a suffix here. chunk_size = 20 - 1 - 8 elif encoding == 'multisig': # Two pubkeys, minus length byte, minus prefix, minus two nonces, # minus two sign bytes. 
chunk_size = (33 * 2) - 1 - 8 - 2 - 2 elif encoding == 'opreturn': chunk_size = config.OP_RETURN_MAX_SIZE if len(data) + len(config.PREFIX) > chunk_size: raise exceptions.TransactionError('One `OP_RETURN` output per transaction.') data_array = list(chunks(data, chunk_size)) # Data outputs. if encoding == 'multisig': data_value = multisig_dust_size elif encoding == 'opreturn': data_value = op_return_value else: # Pay‐to‐PubKeyHash, e.g. data_value = regular_dust_size data_output = (data_array, data_value) if not dust_return_pubkey: if encoding == 'multisig': dust_return_pubkey = get_dust_return_pubkey(source, provided_pubkeys, encoding) else: dust_return_pubkey = None else: data_array = [] data_output = None dust_return_pubkey = None data_btc_out = sum([data_value for data_chunk in data_array]) '''Inputs''' # Calculate collective size of outputs, for fee calculation. p2pkhsize = 25 + 9 if encoding == 'multisig': data_output_size = 81 # 71 for the data elif encoding == 'opreturn': data_output_size = 90 # 80 for the data else: data_output_size = p2pkhsize # Pay‐to‐PubKeyHash (25 for the data?) outputs_size = (p2pkhsize * len(destination_outputs)) + (len(data_array) * data_output_size) # Get inputs. multisig_inputs = not data use_inputs = custom_inputs # Array of UTXOs, as retrieved by listunspent function from bitcoind if custom_inputs is None: if unspent_tx_hash is not None: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, multisig_inputs=multisig_inputs) else: unspent = backend.get_unspent_txouts(source, unconfirmed=allow_unconfirmed_inputs, multisig_inputs=multisig_inputs) # filter out any locked UTXOs to prevent creating transactions that spend the same UTXO when they're created at the same time if UTXO_LOCKS is not None and source in UTXO_LOCKS: unspentkeys = {make_outkey(output) for output in unspent} filtered_unspentkeys = unspentkeys - UTXO_LOCKS[source].keys() unspent = [output for output in unspent if make_outkey(output) in filtered_unspentkeys] unspent = backend.sort_unspent_txouts(unspent) logger.debug('Sorted candidate UTXOs: {}'.format([print_coin(coin) for coin in unspent])) use_inputs = unspent # use backend estimated fee_per_kb if estimate_fee_per_kb: estimated_fee_per_kb = backend.fee_per_kb(estimate_fee_per_kb_nblocks) if estimated_fee_per_kb is not None: fee_per_kb = max(estimated_fee_per_kb, fee_per_kb) # never drop below the default fee_per_kb logger.debug('Fee/KB {:.8f}'.format(fee_per_kb / config.UNIT)) inputs = [] btc_in = 0 change_quantity = 0 sufficient_funds = False final_fee = fee_per_kb desired_input_count = 1 if encoding == 'multisig' and data_array and util.enabled('bytespersigop'): desired_input_count = len(data_array) * 2 for coin in use_inputs: logger.debug('New input: {}'.format(print_coin(coin))) inputs.append(coin) btc_in += round(coin['amount'] * config.UNIT) size = 181 * len(inputs) + outputs_size + 10 necessary_fee = int(size / 1000 * fee_per_kb) # If exact fee is specified, use that. Otherwise, calculate size of tx # and base fee on that (plus provide a minimum fee for selling BTC). if exact_fee: final_fee = exact_fee else: final_fee = max(fee_provided, necessary_fee) # Check if good. btc_out = destination_btc_out + data_btc_out change_quantity = btc_in - (btc_out + final_fee) logger.debug('Size: {} Fee: {:.8f} Change quantity: {:.8f} BTC'.format(size, final_fee / config.UNIT, change_quantity / config.UNIT)) # If change is necessary, must not be a dust output. 
if change_quantity == 0 or change_quantity >= regular_dust_size: sufficient_funds = True if len(inputs) >= desired_input_count: break if not sufficient_funds: # Approximate needed change, fee by with most recently calculated # quantities. btc_out = destination_btc_out + data_btc_out total_btc_out = btc_out + max(change_quantity, 0) + final_fee raise exceptions.BalanceError('Insufficient {} at address {}. (Need approximately {} {}.) To spend unconfirmed coins, use the flag `--unconfirmed`. (Unconfirmed coins cannot be spent from multi‐sig addresses.)'.format(config.BTC, source, total_btc_out / config.UNIT, config.BTC)) # Lock the source's inputs (UTXOs) chosen for this transaction if UTXO_LOCKS is not None and not disable_utxo_locks: if source not in UTXO_LOCKS: UTXO_LOCKS[source] = cachetools.TTLCache( UTXO_LOCKS_PER_ADDRESS_MAXSIZE, config.UTXO_LOCKS_MAX_AGE) for input in inputs: UTXO_LOCKS[source][make_outkey(input)] = input logger.debug("UTXO locks: Potentials ({}): {}, Used: {}, locked UTXOs: {}".format( len(unspent), [make_outkey(coin) for coin in unspent], [make_outkey(input) for input in inputs], list(UTXO_LOCKS[source].keys()))) '''Finish''' # Change output. if change_quantity: if script.is_multisig(source): change_address = backend.multisig_pubkeyhashes_to_pubkeys(source, provided_pubkeys) else: change_address = source change_output = (change_address, change_quantity) else: change_output = None # in bitcoin core v0.12.1 a -bytespersigop was added that messes with bare multisig transactions, # as a safeguard we fall back to pubkeyhash encoding when unsure # when len(inputs) > len(data_outputs) there's more bytes:sigops ratio and we can safely continue if encoding == 'multisig' and inputs and data_output and len(inputs) < len(data_array) * 2 and util.enabled('bytespersigop'): # if auto encoding we can do pubkeyhash encoding instead if desired_encoding == 'auto': return construct(db, tx_info, encoding='pubkeyhash', fee_per_kb=fee_per_kb, regular_dust_size=regular_dust_size, multisig_dust_size=multisig_dust_size, op_return_value=op_return_value, exact_fee=exact_fee, fee_provided=fee_provided, provided_pubkeys=provided_pubkeys, allow_unconfirmed_inputs=allow_unconfirmed_inputs, unspent_tx_hash=unspent_tx_hash, custom_inputs=custom_inputs) # otherwise raise exception else: raise exceptions.EncodingError("multisig will be rejected by Bitcoin Core >= v0.12.1, you should use `encoding=auto` or `encoding=pubkeyhash`") # Serialise inputs and outputs. unsigned_tx = serialise(encoding, inputs, destination_outputs, data_output, change_output, dust_return_pubkey=dust_return_pubkey) unsigned_tx_hex = binascii.hexlify(unsigned_tx).decode('utf-8') '''Sanity Check''' from counterpartylib.lib import blocks # Desired transaction info. (desired_source, desired_destination_outputs, desired_data) = tx_info desired_source = script.make_canonical(desired_source) desired_destination = script.make_canonical(desired_destination_outputs[0][0]) if desired_destination_outputs else '' # NOTE: Include change in destinations for BTC transactions. # if change_output and not desired_data and desired_destination != config.UNSPENDABLE: # if desired_destination == '': # desired_destination = desired_source # else: # desired_destination += '-{}'.format(desired_source) # NOTE if desired_data == None: desired_data = b'' # Parsed transaction info. try: parsed_source, parsed_destination, x, y, parsed_data = blocks._get_tx_info(unsigned_tx_hex) except exceptions.BTCOnlyError: # Skip BTC‐only transactions. 
return unsigned_tx_hex desired_source = script.make_canonical(desired_source) # Check desired info against parsed info. desired = (desired_source, desired_destination, desired_data) parsed = (parsed_source, parsed_destination, parsed_data) if desired != parsed: # Unlock (revert) UTXO locks if UTXO_LOCKS is not None: for input in inputs: UTXO_LOCKS[source].pop(make_outkey(input), None) raise exceptions.TransactionError('Constructed transaction does not parse correctly: {} ≠ {}'.format(desired, parsed)) return unsigned_tx_hex
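# Worked example (not from the library): the coin-selection loop in construct() sizes the transaction
# at 181 bytes per (P2PKH) input plus the precomputed output size, then derives the fee from fee_per_kb.
# All numbers are illustrative; the dust threshold and data-output value are assumed defaults.
fee_per_kb = 25000                    # satoshis per kB, e.g. as returned by backend.fee_per_kb()
regular_dust_size = 5430              # assumed default dust threshold
outputs_size = (25 + 9) * 1 + 81      # one P2PKH destination plus one multisig data output
btc_out = 5430 + 7800                 # destination dust value + data-output value

for n_inputs, btc_in in ((1, 10000), (2, 60000)):
    size = 181 * n_inputs + outputs_size + 10
    necessary_fee = size * fee_per_kb // 1000   # integer restatement of int(size / 1000 * fee_per_kb)
    change = btc_in - (btc_out + necessary_fee)
    sufficient = change == 0 or change >= regular_dust_size
    print(n_inputs, size, necessary_fee, change, sufficient)
# 1 306 7650 -10880 False   -> not enough value in yet, keep adding inputs
# 2 487 12175 34595 True    -> enough; 34595 satoshis become the change output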
def parse (db, tx, message, message_type_id): issuance_parse_cursor = db.cursor() # Unpack message. try: subasset_longname = None if message_type_id == SUBASSET_ID: if not util.enabled('subassets', block_index=tx['block_index']): logger.warn("subassets are not enabled at block %s" % tx['block_index']) raise exceptions.UnpackError # parse a subasset original issuance message asset_id, quantity, divisible, compacted_subasset_length = struct.unpack(SUBASSET_FORMAT, message[0:SUBASSET_FORMAT_LENGTH]) description_length = len(message) - SUBASSET_FORMAT_LENGTH - compacted_subasset_length if description_length < 0: logger.warn("invalid subasset length: [issuance] tx [%s]: %s" % (tx['tx_hash'], compacted_subasset_length)) raise exceptions.UnpackError messages_format = '>{}s{}s'.format(compacted_subasset_length, description_length) compacted_subasset_longname, description = struct.unpack(messages_format, message[SUBASSET_FORMAT_LENGTH:]) subasset_longname = util.expand_subasset_longname(compacted_subasset_longname) callable_, call_date, call_price = False, 0, 0.0 try: description = description.decode('utf-8') except UnicodeDecodeError: description = '' elif (tx['block_index'] > 283271 or config.TESTNET or config.REGTEST) and len(message) >= LENGTH_2: # Protocol change. if len(message) - LENGTH_2 <= 42: curr_format = FORMAT_2 + '{}p'.format(len(message) - LENGTH_2) else: curr_format = FORMAT_2 + '{}s'.format(len(message) - LENGTH_2) asset_id, quantity, divisible, callable_, call_date, call_price, description = struct.unpack(curr_format, message) call_price = round(call_price, 6) # TODO: arbitrary try: description = description.decode('utf-8') except UnicodeDecodeError: description = '' else: if len(message) != LENGTH_1: raise exceptions.UnpackError asset_id, quantity, divisible = struct.unpack(FORMAT_1, message) callable_, call_date, call_price, description = False, 0, 0.0, '' try: asset = util.generate_asset_name(asset_id, tx['block_index']) status = 'valid' except exceptions.AssetIDError: asset = None status = 'invalid: bad asset name' except exceptions.UnpackError as e: asset, quantity, divisible, callable_, call_date, call_price, description = None, None, None, None, None, None, None status = 'invalid: could not unpack' # parse and validate the subasset from the message subasset_parent = None if status == 'valid' and subasset_longname is not None: # Protocol change. try: # ensure the subasset_longname is valid util.validate_subasset_longname(subasset_longname) subasset_parent, subasset_longname = util.parse_subasset_from_asset_name(subasset_longname) except exceptions.AssetNameError as e: asset = None status = 'invalid: bad subasset name' reissuance = None fee = 0 if status == 'valid': call_date, call_price, problems, fee, description, divisible, reissuance, reissued_asset_longname = validate(db, tx['source'], tx['destination'], asset, quantity, divisible, callable_, call_date, call_price, description, subasset_parent, subasset_longname, block_index=tx['block_index']) if problems: status = 'invalid: ' + '; '.join(problems) if not util.enabled('integer_overflow_fix', block_index=tx['block_index']) and 'total quantity overflow' in problems: quantity = 0 if tx['destination']: issuer = tx['destination'] transfer = True quantity = 0 else: issuer = tx['source'] transfer = False # Debit fee. if status == 'valid': util.debit(db, tx['source'], config.XCP, fee, action="issuance fee", event=tx['tx_hash']) # Lock? 
lock = False if status == 'valid': if description and description.lower() == 'lock': lock = True cursor = db.cursor() issuances = list(cursor.execute('''SELECT * FROM issuances \ WHERE (status = ? AND asset = ?) ORDER BY tx_index ASC''', ('valid', asset))) cursor.close() description = issuances[-1]['description'] # Use last description. (Assume previous issuance exists because tx is valid.) timestamp, value_int, fee_fraction_int = None, None, None if not reissuance: # Add to table of assets. bindings= { 'asset_id': str(asset_id), 'asset_name': str(asset), 'block_index': tx['block_index'], 'asset_longname': subasset_longname, } sql='insert into assets values(:asset_id, :asset_name, :block_index, :asset_longname)' issuance_parse_cursor.execute(sql, bindings) if status == 'valid' and reissuance: # when reissuing, add the asset_longname to the issuances table for API lookups asset_longname = reissued_asset_longname else: asset_longname = subasset_longname # Add parsed transaction to message-type–specific table. bindings= { 'tx_index': tx['tx_index'], 'tx_hash': tx['tx_hash'], 'block_index': tx['block_index'], 'asset': asset, 'quantity': quantity, 'divisible': divisible, 'source': tx['source'], 'issuer': issuer, 'transfer': transfer, 'callable': callable_, 'call_date': call_date, 'call_price': call_price, 'description': description, 'fee_paid': fee, 'locked': lock, 'status': status, 'asset_longname': asset_longname, } if "integer overflow" not in status: sql='insert into issuances values(:tx_index, :tx_hash, :block_index, :asset, :quantity, :divisible, :source, :issuer, :transfer, :callable, :call_date, :call_price, :description, :fee_paid, :locked, :status, :asset_longname)' issuance_parse_cursor.execute(sql, bindings) else: logger.warn("Not storing [issuance] tx [%s]: %s" % (tx['tx_hash'], status)) logger.debug("Bindings: %s" % (json.dumps(bindings), )) # Credit. if status == 'valid' and quantity: util.credit(db, tx['source'], asset, quantity, action="issuance", event=tx['tx_hash']) issuance_parse_cursor.close()
def parse(db, tx, message): order_parse_cursor = db.cursor() # Unpack message. try: if len(message) != LENGTH: raise exceptions.UnpackError give_id, give_quantity, get_id, get_quantity, expiration, fee_required = struct.unpack( FORMAT, message) give_asset = util.get_asset_name(db, give_id, tx['block_index']) get_asset = util.get_asset_name(db, get_id, tx['block_index']) status = 'open' except (exceptions.UnpackError, exceptions.AssetNameError, struct.error) as e: give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required = 0, 0, 0, 0, 0, 0 status = 'invalid: could not unpack' price = 0 if status == 'open': try: price = util.price(get_quantity, give_quantity) except ZeroDivisionError: price = 0 # Overorder order_parse_cursor.execute( '''SELECT * FROM balances \ WHERE (address = ? AND asset = ?)''', (tx['source'], give_asset)) balances = list(order_parse_cursor) if give_asset != config.BTC: if not balances: give_quantity = 0 else: balance = balances[0]['quantity'] if balance < give_quantity: give_quantity = balance get_quantity = int(price * give_quantity) problems = validate(db, tx['source'], give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, tx['block_index']) if problems: status = 'invalid: ' + '; '.join(problems) if util.enabled('btc_order_minimum'): min_btc_quantity = 0.001 * config.UNIT # 0.001 BTC if (give_asset == config.BTC and give_quantity < min_btc_quantity ) or (get_asset == config.BTC and get_quantity < min_btc_quantity): if problems: status += '; btc order below minimum' else: status = 'invalid: btc order below minimum' # Debit give quantity. (Escrow.) if status == 'open': if give_asset != config.BTC: # No need (or way) to debit BTC. util.debit(db, tx['source'], give_asset, give_quantity, action='open order', event=tx['tx_hash']) # Add parsed transaction to message-type–specific table. bindings = { 'tx_index': tx['tx_index'], 'tx_hash': tx['tx_hash'], 'block_index': tx['block_index'], 'source': tx['source'], 'give_asset': give_asset, 'give_quantity': give_quantity, 'give_remaining': give_quantity, 'get_asset': get_asset, 'get_quantity': get_quantity, 'get_remaining': get_quantity, 'expiration': expiration, 'expire_index': tx['block_index'] + expiration, 'fee_required': fee_required, 'fee_required_remaining': fee_required, 'fee_provided': tx['fee'], 'fee_provided_remaining': tx['fee'], 'status': status, } if "integer overflow" not in status: sql = 'insert into orders values(:tx_index, :tx_hash, :block_index, :source, :give_asset, :give_quantity, :give_remaining, :get_asset, :get_quantity, :get_remaining, :expiration, :expire_index, :fee_required, :fee_required_remaining, :fee_provided, :fee_provided_remaining, :status)' order_parse_cursor.execute(sql, bindings) else: logger.warn("Not storing [order] tx [%s]: %s" % (tx['tx_hash'], status)) logger.debug("Bindings: %s" % (json.dumps(bindings), )) # Match. if status == 'open' and tx['block_index'] != config.MEMPOOL_BLOCK_INDEX: match(db, tx) order_parse_cursor.close()
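# Worked example (not from the library): the "Overorder" branch above quietly shrinks an order to what
# the source can actually escrow while keeping its price. Fraction stands in for util.price(); the
# balances are made up.
from fractions import Fraction

give_quantity, get_quantity = 1000, 250        # offered: give 1000 units, ask 250 units
balance = 600                                  # but only 600 units are actually held

price = Fraction(get_quantity, give_quantity)  # 1/4, the price the order was placed at
if balance < give_quantity:
    give_quantity = balance                    # cap the give side at the available balance...
    get_quantity = int(price * give_quantity)  # ...and rescale the get side at the same price
assert (give_quantity, get_quantity) == (600, 150)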