def from_bytes(b):
    """ Deserializes a BlockHeader from a bytestream.

        Consumes the 80-byte header fields from the front of the
        stream and returns whatever bytes remain ("eats" the stream).

    Args:
        b (bytes): bytes beginning with the (4-byte) version.

    Returns:
        bh, b (tuple): A tuple containing two elements - a BlockHeader
            object and the remainder of the bytestream after
            deserialization.
    """
    version, rem = unpack_u32(b)
    prev_hash, rem = Hash(rem[:32]), rem[32:]
    merkle_root, rem = Hash(rem[:32]), rem[32:]
    timestamp, rem = unpack_u32(rem)
    bits, rem = unpack_u32(rem)
    nonce, rem = unpack_u32(rem)

    header = BlockHeader(version,
                         prev_hash,
                         merkle_root,
                         timestamp,
                         bits,
                         nonce)
    return header, rem
def test_block(blocks_json):
    """Round-trip check: build Block objects from JSON fixtures and verify
    every header field, the computed block hash, and proof-of-work validity.
    On an assertion failure, dump both sides for diagnosis before re-raising.
    """
    for block_json in blocks_json:
        # Why is it so f*ing hard to get a UNIX-time from a time string?
        a = arrow.get(block_json['time'])
        time = timegm(a.datetime.timetuple())

        # TODO: Need to have a make_txn_from_json() method that's shared
        # between here and test_txn()
        txns = [txn_from_json(t) for t in block_json['transactions']]

        # Create a new Block object
        block = Block(block_json['height'],
                      block_json['version'],
                      Hash(block_json['previous_block_hash']),
                      time,
                      int(block_json['bits'], 16),
                      block_json['nonce'],
                      txns)

        block_hash = block.hash

        try:
            assert len(block.txns) == len(block_json['transactions'])
            assert block.height == block_json['height']
            assert block.block_header.version == block_json['version']
            assert block.block_header.prev_block_hash == Hash(
                block_json['previous_block_hash'])
            assert block.block_header.merkle_root_hash == Hash(
                block_json['merkle_root'])
            assert block.block_header.time == time
            assert block.block_header.bits == int(block_json['bits'], 16)
            assert block.block_header.nonce == block_json['nonce']
            assert block_hash == Hash(block_json['hash'])
            assert block.block_header.valid
        except AssertionError as e:
            # Print expected vs. actual for every field to aid debugging,
            # then re-raise so the test still fails.
            print(e)
            print("block height: %d" % (block.height))
            print(" from json: %d" % (block_json['height']))
            print(" version: %d" % (block.block_header.version))
            print(" from json: %d" % (block_json['version']))
            print(" prev_block_hash: %s" % (block.block_header.prev_block_hash))
            print(" from json: %s" % (block_json['previous_block_hash']))
            print(" merkle_root_hash: %s" % (block.block_header.merkle_root_hash))
            print(" from json: %s" % (block_json['merkle_root']))
            print(" time: %d" % (block.block_header.time))
            print(" from json: %d" % (time))
            print(" bits: %d" % (block.block_header.bits))
            print(" from json: %d" % (int(block_json['bits'], 16)))
            print(" nonce: %d" % (block.block_header.nonce))
            print(" from json: %d" % (block_json['nonce']))
            raise
def _op_hash256(self):
    """ The input is hashed two times with SHA-256. """
    self._check_stack_len(1)

    top = self._stack.pop()
    digest = bytes(Hash.dhash(top))
    self._stack.append(digest)
def hash(self):
    """ Computes the hash of the transaction.

    Returns:
        dhash (bytes): Double SHA-256 hash of the serialized transaction.
            NOTE(review): Hash.dhash likely returns a Hash object rather
            than raw bytes -- an identical sibling property elsewhere in
            this file documents its return as a Hash. Confirm whether
            "bytes" is accurate here.
    """
    return Hash.dhash(bytes(self))
def hash(self):
    """ Computes the double SHA-256 hash of the serialized object.

        Serialization is delegated to this object's __bytes__ method.

    Returns:
        Hash: object containing the hash
    """
    return Hash.dhash(bytes(self))
def txn_from_json(txn_json):
    """Build a Transaction from a JSON fixture dict (test helper)."""
    inputs = []
    for inp_json in txn_json['inputs']:
        if 'output_hash' in inp_json:
            outpoint = Hash(inp_json['output_hash'])
            sig_script = Script(bytes.fromhex(inp_json['script_signature_hex']))
            # Do this to test script de/serialization
            sig_script._disassemble()
            inputs.append(TransactionInput(outpoint,
                                           inp_json['output_index'],
                                           sig_script,
                                           inp_json['sequence']))
        else:
            # Coinbase txn, we pass in a block version of 1 since the
            # coinbase script from api.chain.com already has the
            # height in there. Don't want our stuff to repack it in.
            inputs.append(CoinbaseInput(txn_json['block_height'],
                                        bytes.fromhex(inp_json['coinbase']),
                                        inp_json['sequence'],
                                        1))

    outputs = []
    for out_json in txn_json['outputs']:
        pub_script = Script(bytes.fromhex(out_json['script_hex']))
        pub_script._disassemble()
        outputs.append(TransactionOutput(out_json['value'], pub_script))

    return Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                       inputs,
                       outputs,
                       txn_json['lock_time'])
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
            Transaction objects.
    """
    last_block_index = self.get_block_height()
    ret = defaultdict(list)
    total_items = limit
    # Query in chunks of at most 199 addresses per request.
    for addresses in self._list_chunks(address_list, 199):
        fr = 0
        to = min(100, limit)
        # Page through results; the server reports the true total in
        # "totalItems" and the next offset in "to".
        while fr < total_items:
            req = "addrs/" + ",".join(addresses) + \
                  "/txs?from=%d&to=%d" % (fr, to)
            r = self._request("GET", req)
            txn_data = r.json()

            if "totalItems" in txn_data:
                total_items = txn_data["totalItems"]

            fr = txn_data["to"]
            # NOTE(review): subsequent pages request a fixed window of
            # 100 and ignore `limit`; confirm whether limit should cap
            # later pages too.
            to = fr + 100

            for data in txn_data['items']:
                # Skip entries without full input/output detail.
                if "vin" not in data or "vout" not in data:
                    continue

                block_hash = None
                block = None
                # Derive the containing block height from the current
                # tip and the confirmation count.
                if data['confirmations'] > 0:
                    block = last_block_index - data['confirmations'] + 1
                    block_hash = Hash(data['blockhash'])
                metadata = dict(block=block,
                                block_hash=block_hash,
                                network_time=data.get("time", None),
                                confirmations=data['confirmations'])

                # Client-side min_block filter (unconfirmed txns pass).
                if min_block and block:
                    if block < min_block:
                        continue

                txn, addr_keys = self.txn_from_json(data)
                for addr in addr_keys:
                    if addr in addresses:
                        ret[addr].append(
                            dict(metadata=metadata,
                                 transaction=txn))

    return ret
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    tip_height = self.get_block_height()
    result = {}
    for txid in ids:
        resp = self._request("GET", "tx/%s" % txid)
        data = resp.json()

        # Skip failed lookups and entries lacking full input/output
        # detail.
        if resp.status_code != 200:
            continue
        if "vin" not in data or "vout" not in data:
            continue

        block = None
        block_hash = None
        if data['confirmations'] > 0:
            # Containing block height derived from tip + confirmations.
            block = tip_height - data['confirmations'] + 1
            block_hash = Hash(data['blockhash'])

        metadata = dict(block=block,
                        block_hash=block_hash,
                        network_time=data['time'],
                        confirmations=data['confirmations'])

        txn, _ = self.txn_from_json(data)
        assert str(txn.hash) == txid
        result[txid] = dict(metadata=metadata, transaction=txn)

    return result
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    result = {}
    for txid in ids:
        resp = self._request("GET", "transactions/%s" % txid)
        payload = resp.json()

        if resp.status_code != 200:
            continue

        block_hash = Hash(payload['block_hash']) if payload['block_hash'] \
            else None
        received = arrow.get(payload['chain_received_at']).datetime
        metadata = dict(block=payload['block_height'],
                        block_hash=block_hash,
                        network_time=timegm(received.timetuple()),
                        confirmations=payload['confirmations'])

        txn, _ = self.txn_from_json(payload)
        assert str(txn.hash) == txid
        result[txid] = dict(metadata=metadata, transaction=txn)

    return result
def test_op_checklocktimeverify():
    """Exercise OP_CHECKLOCKTIMEVERIFY (BIP 65) failure and success paths
    against a txn with lock_time 367987.
    """
    prev_txn_hash = Hash(
        '6eae1e03964799c4e29039db459ea4fad4df57c2b06f730b60032a48fb075620')
    txn_input = TransactionInput(prev_txn_hash,
                                 0,
                                 Script(""),
                                 1)
    addr = "1HJiL6AYYmtkbJzC9bxAorznWijwNK5Z8E"
    out_script_pub_key = Script.build_p2pkh(utils.address_to_key_hash(addr)[1])
    txn_output = TransactionOutput(9000,
                                   out_script_pub_key)

    # Create the txn
    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      [txn_input],
                      [txn_output],
                      367987)

    # This is one more (367988) so it should fail
    s = Script("0x749d05 OP_CHECKLOCKTIMEVERIFY")
    si = ScriptInterpreter(txn=txn,
                           input_index=0,
                           sub_script=out_script_pub_key)
    si.run_script(s)
    assert not si.valid

    # This is negative, so it should fail
    s = Script("0xfff74d05 OP_CHECKLOCKTIMEVERIFY")
    si = ScriptInterpreter(txn=txn,
                           input_index=0,
                           sub_script=out_script_pub_key)
    si.run_script(s)
    assert not si.valid

    # This is one less (367986) so it should pass
    s = Script("0x729d05 OP_CHECKLOCKTIMEVERIFY")
    si = ScriptInterpreter(txn=txn,
                           input_index=0,
                           sub_script=out_script_pub_key)
    si.run_script(s)
    assert not si.stop

    # Now reformulate the txn so that the input is finalized
    # (sequence == 0xffffffff disables CLTV).
    txn_input.sequence_num = 0xffffffff
    si.run_script(s)
    assert not si.valid

    # The last check is if there are mismatching notions of locktime
    # (stack value is a block height; txn lock_time is a timestamp).
    txn_input.sequence_num = 1
    txn.lock_time = 500000001
    si = ScriptInterpreter(txn=txn,
                           input_index=0,
                           sub_script=out_script_pub_key)
    si.run_script(s)
    assert not si.valid
def set_txn_side_effect_for_hd_discovery(self):
    """Prime the mocked get_transactions with side effects that simulate
    HD account discovery across payout and change chains.

    Returns:
        int: number of mock responses queued (i.e. number of
            get_transactions calls the discovery is expected to make).
    """
    # For each used account, there are at least 2 calls required:
    # 1 for the first DISCOVERY_INCREMENT payout addresses and 1
    # for the first DISCOVERY_INCREMENT change
    # addresses. Depending on the number of used addresses for the
    # account, this will change.
    effects = []

    n = self._num_used_accounts
    # Discovery always probes at least one account.
    if n == 0:
        n = 1

    for acct_num in range(n):
        for change in [0, 1]:
            num_used = self._num_used_addresses[acct_num][change]
            # Number of address-increment windows needed to cover the
            # used addresses plus the gap limit.
            r = math.ceil((num_used + HDAccount.GAP_LIMIT) /
                          self.address_increment)
            k = 'change_addresses' if change else 'payout_addresses'
            addr_list = self._acct_keys[acct_num][k]

            if change:
                # Confirmed txn metadata; vary block/confirmations by r
                # so successive accounts look distinct.
                metadata = dict(block=234790 + r,
                                block_hash=Hash("000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"),
                                confirmations=23890 - r)
            else:
                # Unconfirmed txn metadata for payout addresses.
                metadata = dict(block=None,
                                block_hash=None,
                                confirmations=0)

            # Even fully-unused chains get one probe.
            if r == 0:
                r = 1
            for i in range(r):
                start = i * self.address_increment
                end = (i + 1) * self.address_increment
                addr_range = range(start, end)
                out = TransactionOutput(value=100000,
                                        script=Script.build_p2pkh(
                                            address_to_key_hash(
                                                addr_list[i])[1]))
                dummy_txn = Transaction(1, [], [out], 0)

                m = MockTxnDict(num_used=num_used,
                                addr_range=addr_range,
                                addr_list=addr_list,
                                used_value=[dict(metadata=metadata,
                                                 transaction=dummy_txn)],
                                unused_value=[])
                effects.append(m)

    self.get_transactions.side_effect = effects

    return len(effects)
def _complete_merkle_edge(self):
    """Fold the coinbase txn hash up the merkle edge to derive the
    merkle root and store it in the block header.
    """
    if self._cb_txn is None:
        # TODO: raise an error?
        return

    running = self._cb_txn.hash
    for edge_hash in self.merkle_edge:
        running = Hash.dhash(bytes(running) + bytes(edge_hash))

    self.block_header.merkle_root_hash = running
def __init__(self, height, version, prev_block_hash, time, bits, nonce, txns):
    # The merkle root is a placeholder here; invalidate() below
    # presumably recomputes it from txns -- confirm. (The nonce, unlike
    # in CompactBlock, is the real caller-supplied value.)
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    Hash(bytes(32)),  # Fake merkle_root for now
                                    time,
                                    bits,
                                    nonce)
    self.height = height
    self.txns = txns

    self.merkle_tree = None
    self.invalidate()
def test_smallest_first():
    """Verify the smallest-first UTXO selector: it fails when funds are
    insufficient, covers the requested amount otherwise, and never picks
    a UTXO larger than any it leaves behind.
    """
    utxos_by_addr = {}
    addr_base = "1LQ1TjCKJN8GXsYtsqnREqs5Z4eaPCu5p"
    h = Hash(
        "a2972893f1be1f54d68a9228d9706ff8f202bb80f488f4dd46c0fe37c1e42415")
    # Ten addresses, one UTXO each, values 0..180000 in 20000 steps
    # (total 900000).
    for i in range(10):
        addr = addr_base + str(i)
        utxos_by_addr[addr] = [
            UnspentTransactionOutput(transaction_hash=h,
                                     outpoint_index=random.randint(0, 10),
                                     value=i * 20000,
                                     scr=Script(""),
                                     confirmations=10)
        ]

    utxos = []
    for addr, addr_utxos in utxos_by_addr.items():
        utxos += addr_utxos

    # More than the total available -> selection must fail.
    amount = 1000000
    selected, fees = utxo_selector_smallest_first(utxos_by_addr=utxos_by_addr,
                                                  amount=amount,
                                                  num_outputs=2)

    assert not selected

    # Affordable amount -> selection must succeed.
    amount = 100000
    selected, fees = utxo_selector_smallest_first(utxos_by_addr=utxos_by_addr,
                                                  amount=amount,
                                                  num_outputs=2)

    sum_selected = 0
    remaining = []
    selected_list = []
    for addr, utxo_list in selected.items():
        sum_selected += sum([utxo.value for utxo in utxo_list])
        selected_list += utxo_list
    assert sum_selected >= amount

    remaining = [u for u in utxos if u not in selected_list]

    largest_selected = 0
    for s in selected_list:
        if s.value > largest_selected:
            largest_selected = s.value

    # Make sure that the largest of the selected is <= min(remaining)
    assert remaining == [] or largest_selected <= min(
        [u.value for u in remaining])
def mine_work(work_msg, enonce1, enonce2_size):
    """ Mine the work using a CPU to find a valid solution.

        Loop until the CPU finds a valid solution of the given work.

    Todo:
        Slow down the click echo when on a 21BC.

    Args:
        work_msg (WorkNotification): the work given by the pool API
        enonce1 (bytes): extra nonce required to make the coinbase
            transaction
        enonce2_size (int): size of the extra nonce 2 in bytes

    Returns:
        Share: the found share, or None if the entire enonce2/nonce
            space was exhausted without a solution.
    """
    pool_target = utils.bits_to_target(work_msg.bits_pool)
    for enonce2_num in range(0, 2 ** (enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(enonce2_size, byteorder="big")

        # Rebuild the coinbase txn (and thus the merkle root) for this
        # enonce2 value.
        cb_txn, _ = Transaction.from_bytes(
            work_msg.coinb1 + enonce1 + enonce2 + work_msg.coinb2)
        cb = CompactBlock(work_msg.height,
                          work_msg.version,
                          Hash(work_msg.prev_block_hash),
                          work_msg.ntime,
                          work_msg.nbits,  # lower difficulty work for testing
                          work_msg.merkle_edge,
                          cb_txn)

        row_counter = 0
        # Search the full 32-bit nonce space. range() excludes its stop
        # value, so 0xffffffff + 1 is needed to also try nonce
        # 0xffffffff (the original range(0xffffffff) skipped it).
        for nonce in range(0xffffffff + 1):
            # Progress indicator: one green block every 6000 nonces,
            # 40 blocks per row.
            if nonce % 6e3 == 0:
                logger.info(click.style(u'█', fg='green'), nl=False)
                row_counter += 1
            if row_counter > 40:
                row_counter = 0
                logger.info("")

            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                share = Share(enonce2=enonce2,
                              nonce=nonce,
                              work_id=work_msg.work_id,
                              otime=int(time.time()))
                # adds a new line at the end of progress bar
                logger.info("")
                return share

        logger.info("Exhausted enonce1 space. Changing enonce2")
def __init__(self, height, version, prev_block_hash, time, bits, merkle_edge,
             cb_txn=None):
    # Header is built with placeholder merkle root and nonce; both are
    # filled in later (the merkle root when a coinbase txn is set, the
    # nonce by the miner).
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    Hash(bytes(32)),  # Fake merkle_root for now
                                    time,
                                    bits,
                                    0)  # Fake nonce also
    self.height = height
    self.merkle_edge = merkle_edge

    if cb_txn is not None:
        # NOTE(review): assigning coinbase_transaction here vs. _cb_txn
        # below suggests coinbase_transaction is a property whose setter
        # also completes the merkle edge -- confirm.
        self.coinbase_transaction = cb_txn
    else:
        self._cb_txn = None
def set_txn_side_effect_for_index(self, account_index, address_index,
                                  change):
    """Queue a single mocked get_transactions response marking one
    address (at address_index on the payout or change chain of the
    given account) as used.
    """
    placeholder_txn = Transaction(1, [], [], 0)
    meta = dict(block_height=234790,
                block_hash=Hash("000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"),
                confirmations=23890)

    key = 'change_addresses' if change else 'payout_addresses'
    addresses = self._acct_keys[account_index][key]

    mock_response = MockTxnDict(num_used=address_index + 1,
                                addr_range=range(address_index,
                                                 address_index + 1),
                                addr_list=addresses,
                                used_value=[dict(metadata=meta,
                                                 transaction=placeholder_txn)],
                                unused_value=[])
    self.get_transactions.side_effect = [mock_response]
def get_utxos(self, addresses, include_unconfirmed=False):
    """ Returns a dict containing the UTXOs for the desired addresses

    Args:
        addresses (list): List of addresses to get balances for
        include_unconfirmed (bool): True if unconfirmed transactions
            should be included in the balance.

    Returns:
        dict: Keys are addresses, values are lists of
            UnspentTransactionOutput objects for the address.

    Raises:
        Exception: if a deposit references an output not present in
            the outputs cache (internal inconsistency).
    """
    # Status mask matching any unspent output, confirmed or not.
    unconfirmed_mask = self.UNSPENT | self.UNCONFIRMED | self.PROVISIONAL
    rv = {}
    for addr in addresses:
        # Get the list of unspent deposits.
        if addr not in self._deposits_for_addr:
            continue

        for txid in self._deposits_for_addr[addr]:
            for i in self._deposits_for_addr[addr][txid]:
                # Look up the status in the outputs cache
                if txid not in self._outputs_cache or \
                   i not in self._outputs_cache[txid]:
                    raise Exception("Don't have information for %r:%r" %
                                    (txid, i))

                status = self._outputs_cache[txid][i]['status']
                if status & self.UNSPENT:
                    # Include the output if either: the caller wants
                    # unconfirmed outputs and this one matches the
                    # broad mask, or the caller wants confirmed-only
                    # and the status is exactly UNSPENT.
                    if (status & unconfirmed_mask and
                        include_unconfirmed) or \
                       (status == self.UNSPENT and
                        not include_unconfirmed):
                        out = self._outputs_cache[txid][i]['output']
                        utxo = UnspentTransactionOutput(
                            transaction_hash=Hash(txid),
                            outpoint_index=i,
                            value=out.value,
                            scr=out.script,
                            confirmations=self._txn_cache[txid].
                            confirmations)
                        if addr not in rv:
                            rv[addr] = []
                        rv[addr].append(utxo)

    return rv
def _invalidate_coinbase(self, merkle_node=None):
    # Recompute only the merkle path containing the coinbase txn. The
    # coinbase is the left-most leaf of the tree, so recursing down
    # left children is sufficient; sibling hashes are reused as-is.
    if merkle_node is None:
        merkle_node = self.merkle_tree

    if(merkle_node.left_child is None and
       merkle_node.right_child is None):
        # This is the node corresponding to the coinbase, update hash
        merkle_node.hash = self.coinbase_transaction.hash
        return
    else:
        self._invalidate_coinbase(merkle_node.left_child)
        # Re-hash this interior node from its (partially updated)
        # children.
        merkle_node.hash = Hash.dhash(bytes(merkle_node.left_child.hash) +
                                      bytes(merkle_node.right_child.hash))

    # If we're back at the root, update the blockheader
    if merkle_node is self.merkle_tree:
        self.block_header.merkle_root_hash = self.merkle_tree.hash
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in address_list.

    Args:
        address_list (list): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
            Transaction objects.
    """
    ret = defaultdict(list)
    # The endpoint accepts at most ~200 addresses per call; chunk.
    for chunk in self._list_chunks(address_list, 199):
        path = "addresses/" + ",".join(chunk) \
               + "/transactions?limit={}".format(limit)
        if min_block:
            path += "&min_block={}".format(min_block)

        resp = self._request("GET", path)
        for data in resp.json():
            block_hash = Hash(data['block_hash']) if data['block_hash'] \
                else None
            received = arrow.get(data['chain_received_at']).datetime
            metadata = dict(block=data['block_height'],
                            block_hash=block_hash,
                            network_time=timegm(received.timetuple()),
                            confirmations=data['confirmations'])

            txn, addr_keys = self.txn_from_json(data)
            for addr in addr_keys:
                if addr in chunk:
                    ret[addr].append(dict(metadata=metadata,
                                          transaction=txn))

    return ret
def _deserialize(wt_dict):
    # Private, only for internal wallet usage
    wt = WalletTransaction.from_hex(wt_dict['transaction'])

    # Older serializations stored the metadata fields at the top level;
    # newer ones nest them under 'metadata'.
    meta = wt_dict.get('metadata', wt_dict)

    wt.block = meta['block']
    # Leave block_hash untouched (class default) when not recorded.
    if meta['block_hash'] is not None:
        wt.block_hash = Hash(meta['block_hash'])
    wt.confirmations = meta['confirmations']
    wt.network_time = meta['network_time']

    # Optional fields: only set when present.
    for attr in ('value', 'fees', 'provisional'):
        if attr in meta:
            setattr(wt, attr, meta[attr])

    return wt
def get_signature_for_input(self, input_index, hash_type, private_key,
                            sub_script):
    """ Returns the signature for an input.

        This function only returns the signature for an input, it
        does not insert the signature into the script member of
        the input. It also does not validate that the given private key
        matches any public keys in the sub_script.

    Args:
        input_index (int): The index of the input to sign.
        hash_type (int): What kind of signature hash to do.
        private_key (crypto.PrivateKey): private key with which to
            sign the transaction.
        sub_script (Script): the scriptPubKey of the corresponding
            utxo being spent if the outpoint is P2PKH or the redeem
            script if the outpoint is P2SH.

    Returns:
        tuple: A tuple containing the signature object and the message
            that was signed.

    Raises:
        ValueError: if input_index is out of range.
    """
    if input_index < 0 or input_index >= len(self.inputs):
        raise ValueError("Invalid input index.")

    # Per the signing rules, OP_CODESEPARATORs are stripped from the
    # script that gets hashed.
    tmp_script = sub_script.remove_op("OP_CODESEPARATOR")
    if hash_type & 0x1f == self.SIG_HASH_SINGLE and len(self.inputs) > len(self.outputs):
        # This is to deal with the bug where specifying an index
        # that is out of range (wrt outputs) results in a
        # signature hash of 0x1 (little-endian)
        msg_to_sign = 0x1.to_bytes(32, 'little')
    else:
        txn_copy = self._copy_for_sig(input_index,
                                      hash_type,
                                      tmp_script)

        # The message is the double-SHA256 of the modified txn
        # serialization with the 4-byte hash type appended.
        msg_to_sign = bytes(Hash.dhash(bytes(txn_copy) +
                                       pack_u32(hash_type)))

    sig = private_key.sign(msg_to_sign, False)

    return sig, msg_to_sign
def _compute_merkle_tree(self):
    """ Computes the merkle tree from the transactions in self.txns.

        The merkle root is the top node in the tree and can be accessed
        as self.merkle_tree.merkle_hash.
    """
    # Built bottom-up: one leaf per transaction, then pair adjacent
    # hashes level by level until a single root remains.
    nodes = [MerkleNode(t.hash, None, None) for t in self.txns]
    while True:
        if len(nodes) == 1:
            self.merkle_tree = nodes[0]  # This is the root
            return

        if len(nodes) % 2 != 0:
            # Odd-sized level: duplicate the last hash, per the
            # bitcoin merkle convention.
            nodes.append(nodes[-1])

        nodes = [MerkleNode(Hash.dhash(bytes(nodes[i].hash) +
                                       bytes(nodes[i + 1].hash)),
                            nodes[i],
                            nodes[i + 1])
                 for i in range(0, len(nodes), 2)]
def _compute_merkle_tree(self): """ Computes the merkle tree from the transactions in self.transactions. The merkle root is the top node in the tree and can be accessed as self.merkle_tree.merkle_hash. """ # Tree gets built bottom up level_nodes = [MerkleNode(t.hash, None, None) for t in self.txns] while True: if len(level_nodes) == 1: self.merkle_tree = level_nodes[0] # This is the root return if len(level_nodes) % 2 != 0: # Make sure there are an even number of nodes level_nodes.append(level_nodes[-1]) new_level = [] for i in range(0, len(level_nodes), 2): left = level_nodes[i] right = level_nodes[i+1] n = MerkleNode(Hash.dhash(bytes(left.hash) + bytes(right.hash)), left, right) new_level.append(n) level_nodes = new_level
def from_bytes(b):
    """ Deserializes a byte stream into a TransactionInput.

    Args:
        b (bytes): byte stream starting with the outpoint.

    Returns:
        tuple: First element of the tuple is the TransactionInput
            object and the second is the remaining byte stream.
    """
    # Fixed 32-byte outpoint hash, then the variable-length remainder.
    outpoint_hash = Hash(b[0:32])
    index, rem = unpack_u32(b[32:])
    scr, rem = Script.from_bytes(rem)
    seq, rem = unpack_u32(rem)

    txn_input = TransactionInput(outpoint_hash,
                                 index,
                                 scr,
                                 seq)
    return txn_input, rem
class CoinbaseInput(TransactionInput):
    """ See https://bitcoin.org/en/developer-reference#coinbase

    Args:
        height (uint): The height of the block coinbase is part of
            will go into. Not required for version 1
            blocks.
        raw_script (bytes): the bytes of the coinbase script. For
            block_version > 1 the height portion should NOT be included
            in this script.
        sequence (int): Unless you are Satoshi with a version 1 block,
            the default is fine. If you are Satoshi, send me some of
            your private keys and set this to 0.
        block_version (int): The version of the block this coinbase is
            a part of or will go into. If raw_script already contains
            the height of the block, this must be 1.
    """
    # All-zero outpoint hash marks an input as coinbase.
    NULL_OUTPOINT = Hash(bytes(32))
    MAX_INT = 0xffffffff

    def __init__(self, height, raw_script, sequence=MAX_INT, block_version=3):
        self.height = height
        if block_version == 1:
            scr = raw_script
        else:
            # For blocks v2+ the height is pushed at the start of the
            # coinbase script (BIP 34).
            scr = Script.build_push_int(self.height) + raw_script

        # Coinbase scripts are basically whatever, so we don't
        # try to create a script object from them.
        super().__init__(self.NULL_OUTPOINT,
                         self.MAX_INT,
                         scr,
                         sequence)

    def get_addresses(self, testnet=False):
        """ Returns all addresses associated with the script in this input.

        Args:
            testnet (bool): True if the transaction is a testnet transaction.

        Returns:
            list (str): A list of all addresses found in the script.
        """
        # Coinbase scripts contain no spendable addresses.
        return []

    def __str__(self):
        """ Returns a human readable formatting of this input.

        Returns:
            s (str): A string containing the human readable input.
        """
        return (
            "CoinbaseInput(" +
            "Outpoint: %s " % (self.outpoint) +
            "Outpoint Index: 0x%08x " % (self.outpoint_index) +
            "Script: %s " % (bytes_to_str(self.script)) +
            "Sequence: 0x%08x)" % (self.sequence_num))

    def __bytes__(self):
        """ Serializes the object into a byte stream.

        Returns:
            b (bytes): byte stream containing the serialized
                coinbase input.
        """
        return (
            bytes(self.outpoint) +
            pack_u32(self.outpoint_index) +
            pack_var_str(self.script) +
            pack_u32(self.sequence_num)
        )
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction

    Args:
        txn_json: JSON with the following format:

            {
                "hash": "0bf0de38c26195919179f...",
                "block_hash": "000000000000000...",
                "block_height": 303404,
                "block_time": "2014-05-30T23:54:55Z",
                "chain_received_at": "2015-08-13T10:52:21.718Z",
                "confirmations": 69389,
                "lock_time": 0,
                "inputs": [
                    {
                        "transaction_hash": "0bf0de38c2619...",
                        "output_hash": "b84a66c46e24fe71f9...",
                        "output_index": 0,
                        "value": 300000,
                        "addresses": [
                            "3L7dKYQGNoZub928CJ8NC2WfrM8U8GGBjr"
                        ],
                        "script_signature": "03046022100de7b67b9...",
                        "script_signature_hex": "00493046022100de7b...",
                        "sequence": 4294967295
                    }
                ],
                "outputs": [
                    {
                        "transaction_hash": "0bf0de38c261959...",
                        "output_index": 0,
                        "value": 290000,
                        "addresses": [
                            "1K4nPxBMy6sv7jssTvDLJWk1ADHBZEoUVb"
                        ],
                        "script": "OP_DUP OP_HASH160 c629680b8d...",
                        "script_hex": "76a914c629680b8d13...",
                        "script_type": "pubkeyhash",
                        "required_signatures": 1,
                        "spent": false,
                        "spending_transaction": null
                    }
                ],
                "fees": 10000,
                "amount": 290000
            }

    Returns:
        two1.bitcoin.Transaction: a deserialized transaction derived
            from the provided json.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    for i in txn_json["inputs"]:
        if 'coinbase' in i:
            inputs.append(
                CoinbaseInput(height=txn_json["block_height"] or 0,
                              raw_script=bytes.fromhex(i['coinbase']),
                              sequence=i['sequence'],
                              block_version=1))
        else:
            # Script length etc. are not returned so we need to
            # prepend that.
            script, _ = Script.from_bytes(
                pack_var_str(bytes.fromhex(i["script_signature_hex"])))
            inputs.append(TransactionInput(Hash(i["output_hash"]),
                                           i["output_index"],
                                           script,
                                           i["sequence"]))
        # Track which addresses this txn touches (first address only).
        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    for i in txn_json["outputs"]:
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script_hex"])))
        outputs.append(TransactionOutput(i["value"],
                                         script))
        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["lock_time"])

    return txn, addr_keys
def test_txns():
    """End-to-end exercise of the transaction cache: confirmed,
    unconfirmed, and provisional inserts; output-status transitions;
    balance and UTXO queries; provisional pruning; and deletion.

    NOTE(review): depends on a module-level ``cm`` (presumably a shared
    CacheManager instance created elsewhere in this file) plus
    ``WalletTransaction``, ``Hash``, ``CacheManager``, and ``time`` —
    confirm against the full module.
    """
    # --- Txn 1: insert as CONFIRMED (block/block_hash/confirmations set) ---
    txn = WalletTransaction.from_hex(
        '01000000029ccb0665ec780f8b05bf2315a48dfb154dc41f91e8046a59f1c75656826dea5d000000006b483045022100f4d2161473f9d0ba4b5cdbc9e5b7b1d8fca32e3b6bede307352bef6aaa3a08cd022023d8444f78f69de6fd0f6cc391a7ca4de3dc4181220932d01511eb1129fee09e01210328bd51733a7d5bee05368680adef9aaa3f9bb716ec716d5896b1d80afb734d6cffffffff2424cb910235b2059d59023aecfebf6fce4eee31c637e9a0b350491849688727020000006a473044022072de3d707f98adfed3266e0261750cd7b5162732e525d7df17f4e55a55e953b902205046b597acf7acf41e725b459ba6cfe8c03a9d877375cdf483cab9620f92961101210291cbb1304614d86b15f4e8f39e9d8299cd0304ff8b81b5bcf6d9a6f32be649bbffffffff0240420f00000000001976a91434fe777d676fceb3509584c1d7b9f13ee56514d488ace05a0000000000001976a9145237ba33122495420711b3f2cc0463dbb24c9d3988ac00000000'
    )  # nopep8
    txn.block = 374440
    txn.block_hash = Hash(
        '0000000000000000038ee0066680705455d500f287f6c56db7a979c2426a4c02')
    txn.confirmations = 7533
    cm.insert_txn(txn)
    txid = "3779f27a81cdbc435ac258ce5076c211e7a953027aab42573b1b7ce9e50abe8e"
    assert txid in cm._txn_cache
    in_addrs = [
        "1DpCouKa2evX3f2aELUy7iNdsrYuLLaqWy",
        "1GcmBmvYWJKLFHxrTtx5DqQLV7oHQAkH2c"
    ]
    out_addrs = [("15qCydrcqURADXJHrtMW9m6SpPTa3kqkQb", 1000000),
                 ("18VjAjZ7Au8U75LCHT7aH7mTwKETZwHTpi", 23264)]
    # 2 input addrs + 2 output addrs indexed
    assert len(cm._txns_by_addr.keys()) == 4
    # Deposits are keyed by output index; spends by input index.
    for i, a in enumerate(out_addrs):
        assert a[0] in cm._txns_by_addr
        assert list(cm._deposits_for_addr[a[0]][txid]) == [i]
    for i, a in enumerate(in_addrs):
        assert list(cm._spends_for_addr[a][txid]) == [i]

    # Check input and output caches
    assert txid in cm._inputs_cache
    assert len(cm._inputs_cache[txid]) == 2
    assert txid in cm._outputs_cache
    assert len(cm._outputs_cache[txid]) == 2
    assert cm._outputs_cache[txid][0]['output'] is not None
    assert cm._outputs_cache[txid][1]['output'] is not None
    assert cm._outputs_cache[txid][0]['status'] == CacheManager.UNSPENT
    assert cm._outputs_cache[txid][1]['status'] == CacheManager.UNSPENT
    # Inputs of txn 1 mark the outputs they consume as SPENT.
    out_txid1 = "5dea6d825656c7f1596a04e8911fc44d15fb8da41523bf058b0f78ec6506cb9c"
    assert out_txid1 in cm._outputs_cache
    assert cm._outputs_cache[out_txid1][0]['status'] == CacheManager.SPENT
    out_txid2 = "27876849184950b3a0e937c631ee4ece6fbffeec3a02599d05b2350291cb2424"
    assert out_txid2 in cm._outputs_cache
    assert len(cm._outputs_cache[out_txid2].keys()) == 1
    assert cm._outputs_cache[out_txid2][2]['status'] == CacheManager.SPENT
    assert cm.has_txns()
    assert cm.has_txns(0)
    assert not cm.has_txns(1)
    # Only fully-inserted txns are retrievable; spent-from txids are not.
    assert cm.have_transaction(txid)
    assert not cm.have_transaction(out_txid1)
    assert not cm.have_transaction(out_txid2)
    assert cm.get_transaction(txid) == txn
    assert cm.get_transaction(out_txid1) is None
    assert cm.get_transaction(out_txid2) is None

    # Check balances on addresses
    addr_balances = cm.get_balances([a[0] for a in out_addrs])
    for addr, exp_bal in out_addrs:
        assert addr_balances[addr] == exp_bal

    # Add a second transaction that deposits into addresses we have,
    # but insert it as unconfirmed
    txn_hex = "01000000028a9acc005a2158758e44242eee8c18fee7a43cda39a358cc783fb578cfa7cf5f000000006a47304402204a00fcb746f90095c1c50e048f1b0616b421617ca27a7a7465d4086a1623731802202404d0fce1b74f41ce1e3c63f61c8574c8cf2a5eae24ac4df714775168a9118c012102d8bfe3fd2d01f3a2b1380c34ccadcd318cafd1246f41258d7d244f409fb44c93ffffffff15857ef158778f603d34bcff74bd7935cb9d6b4a0147eea008be3f67bd395830020000006a4730440220466f93d784aa24bf497929433777fa283a7cd0000625179d3e9f5c75db4ad10f022022857a607665408a5521cbdf145c1fecd85c427ccf939c49f9a0d828a934e2530121021b5c9a9e6c97b4222c97da5a642e3531bce01b757cfdc9b29ac7d1cbf2d10710ffffffff0240420f00000000001976a91433a0a86dd9dab9902157d8d64e05fc8e0dfba16388ac7a1d0300000000001976a914134ca7427089b8f661efc9806a8418f72e57167f88ac00000000"  # nopep8
    txn = WalletTransaction.from_hex(txn_hex)
    cm.insert_txn(txn)
    txid = "d24f3b9f0aa7b6484bcea563f4c254bd24e8163906cbffc727c2b2dad43af61e"
    assert txid in cm._txn_cache
    in_addrs = [
        "1Ezv6YmYsZvALUaRcZRf8hBdxYni6cm78X",
        "16Mcvb7fYhif94d1RHCn5AE2dm1oXCGnH6"
    ]
    out_addrs = [("15hyvVXH2eJnakwhpqKBf5oTCa3o2bp8m8", 1000000),
                 ("12m3fcaabUgYwWcodgVZUGH6ntFqVrHk5C", 204154)]
    for i, a in enumerate(out_addrs):
        assert a[0] in cm._txns_by_addr
        assert list(cm._deposits_for_addr[a[0]][txid]) == [i]
    for i, a in enumerate(in_addrs):
        assert list(cm._spends_for_addr[a][txid]) == [i]

    # Check input and output caches
    assert txid in cm._inputs_cache
    assert len(cm._inputs_cache[txid]) == 2
    assert txid in cm._outputs_cache
    assert cm._outputs_cache[txid][0]['output'] is not None
    assert cm._outputs_cache[txid][1]['output'] is not None
    # No block info on this insert, so statuses carry UNCONFIRMED.
    assert cm._outputs_cache[txid][0][
        'status'] == CacheManager.UNSPENT | CacheManager.UNCONFIRMED
    assert cm._outputs_cache[txid][1][
        'status'] == CacheManager.UNSPENT | CacheManager.UNCONFIRMED
    out_txid1 = "5fcfa7cf78b53f78cc58a339da3ca4e7fe188cee2e24448e7558215a00cc9a8a"
    assert out_txid1 in cm._outputs_cache
    assert cm._outputs_cache[out_txid1][0][
        'status'] == CacheManager.SPENT | CacheManager.UNCONFIRMED
    out_txid2 = "305839bd673fbe08a0ee47014a6b9dcb3579bd74ffbc343d608f7758f17e8515"
    assert out_txid2 in cm._outputs_cache
    assert len(cm._outputs_cache[out_txid2].keys()) == 1
    assert cm._outputs_cache[out_txid2][2][
        'status'] == CacheManager.SPENT | CacheManager.UNCONFIRMED

    # Check that confirmed balances are 0 for the out_addrs
    out_a = [a[0] for a in out_addrs]
    conf_addr_balances = cm.get_balances(out_a)
    unconf_addr_balances = cm.get_balances(out_a, True)
    for addr, exp_bal in out_addrs:
        assert conf_addr_balances[addr] == 0
        assert unconf_addr_balances[addr] == exp_bal

    # Check utxos
    conf_addr_utxos = cm.get_utxos(out_a)
    unconf_addr_utxos = cm.get_utxos(out_a, True)
    for addr, exp_bal in out_addrs:
        assert addr not in conf_addr_utxos
        assert addr in unconf_addr_utxos
        assert len(unconf_addr_utxos[addr]) == 1
        utxo = unconf_addr_utxos[addr][0]
        assert utxo.value == exp_bal
        assert utxo.num_confirmations == 0

    # Reinsert the transaction with it as confirmed now
    txn = WalletTransaction.from_hex(txn_hex)
    txn.block = 374442
    txn.block_hash = Hash(
        '000000000000000001de250dcfa47f8313aec2f1f41a56f4fb0d099eb497c2b2')
    txn.confirmations = 7684
    cm.insert_txn(txn)
    # Re-inserting as confirmed clears the UNCONFIRMED flag everywhere.
    assert cm._outputs_cache[txid][0]['status'] == CacheManager.UNSPENT
    assert cm._outputs_cache[txid][1]['status'] == CacheManager.UNSPENT
    assert cm._outputs_cache[out_txid1][0]['status'] == CacheManager.SPENT
    assert cm._outputs_cache[out_txid2][2]['status'] == CacheManager.SPENT

    # Check the balances again
    conf_addr_balances = cm.get_balances(out_a)
    unconf_addr_balances = cm.get_balances(out_a, True)
    for addr, exp_bal in out_addrs:
        assert conf_addr_balances[addr] == exp_bal
        assert unconf_addr_balances[addr] == exp_bal

    # Check utxos again
    conf_addr_utxos = cm.get_utxos(out_a)
    unconf_addr_utxos = cm.get_utxos(out_a, True)
    for addr, exp_bal in out_addrs:
        assert addr in conf_addr_utxos
        assert addr in unconf_addr_utxos
        assert len(conf_addr_utxos[addr]) == 1
        assert len(unconf_addr_utxos[addr]) == 1
        utxo = conf_addr_utxos[addr][0]
        assert utxo.value == exp_bal
        assert utxo.num_confirmations == 7684

    # Insert a transaction that spends from one of the out addrs in
    # the above transactions.
    # 1. Insert it provisionally
    # 2. Re-insert as unconfirmed
    # 3. Re-insert as confirmed
    txn_hex = "01000000021ef63ad4dab2c227c7ffcb063916e824bd54c2f463a5ce4b48b6a70a9f3b4fd2000000006a473044022051008f06f1fc5783364712c7bf175c383ebb92c1001ba9f744f5170d5af00bb9022012baa83b3611b2c0e637d2f5e62dd3f6f4debfca805f8a42df6719a67614824d0121027fc10ccde9240463a86c983d2c8d1301311c9debf510119418b0da7b6fdb7ee7ffffffff8ebe0ae5e97c1b3b5742ab7a0253a9e711c27650ce58c25a43bccd817af27937000000006a473044022076fd5835628d4867b489c4c7afa885de33417a3536276b3f7066155b1bd79c15022030a218c2ca35b27e2beefb2298a0bf6fc9eabe93e07f388a1a3aee878025a7b6012102bed99adff9710dbc3e9f7966037d5824ffb134aeba70aec70e34e7eeb6547a94ffffffff0240420f00000000001976a914952e023bf19047e9a014af4ec067667695d8c99488acf8340f00000000001976a914743281d388add04da28e10a12af09c853f98609888ac00000000"  # nopep8
    txn = WalletTransaction.from_hex(txn_hex)
    # First test with a very short expiration
    cm.insert_txn(txn, mark_provisional=True, expiration=1)
    txid = "6fd3c96d466cd465b40e59be14d023c27f1d0ca13075119d3d6baeebfc587b8c"
    assert txid in cm._txn_cache
    assert cm._txn_cache[txid].provisional
    # Sleep past the 1-second expiration so pruning removes it.
    time.sleep(1.5)
    cm.prune_provisional_txns()
    assert txid not in cm._txn_cache

    # Now do default expiration
    cm.insert_txn(txn, mark_provisional=True)
    assert txid in cm._txn_cache
    in_addrs = [
        "15hyvVXH2eJnakwhpqKBf5oTCa3o2bp8m8",
        "15qCydrcqURADXJHrtMW9m6SpPTa3kqkQb"
    ]
    out_addrs = [("1EbnoKrmUEe3hsK9gTVfgYAming6BuqM3L", 1000000),
                 ("1BbPtYsbBPFRCwnU5RuMTttraghXQ5JSZm", 996600)]
    for i, a in enumerate(out_addrs):
        assert a[0] in cm._txns_by_addr
        assert list(cm._deposits_for_addr[a[0]][txid]) == [i]
    for i, a in enumerate(in_addrs):
        assert list(cm._spends_for_addr[a][txid]) == [i]

    # Check input and output caches
    assert txid in cm._inputs_cache
    assert len(cm._inputs_cache[txid]) == 2
    assert txid in cm._outputs_cache
    assert cm._outputs_cache[txid][0]['output'] is not None
    assert cm._outputs_cache[txid][1]['output'] is not None
    # Provisional insert: statuses carry PROVISIONAL | UNCONFIRMED.
    assert cm._outputs_cache[txid][0][
        'status'] == CacheManager.UNSPENT | CacheManager.PROVISIONAL | CacheManager.UNCONFIRMED  # nopep8
    assert cm._outputs_cache[txid][1][
        'status'] == CacheManager.UNSPENT | CacheManager.PROVISIONAL | CacheManager.UNCONFIRMED  # nopep8
    out_txid1 = "d24f3b9f0aa7b6484bcea563f4c254bd24e8163906cbffc727c2b2dad43af61e"
    assert out_txid1 in cm._outputs_cache
    assert cm._outputs_cache[out_txid1][0][
        'status'] == CacheManager.SPENT | CacheManager.PROVISIONAL | CacheManager.UNCONFIRMED  # nopep8
    out_txid2 = "3779f27a81cdbc435ac258ce5076c211e7a953027aab42573b1b7ce9e50abe8e"
    assert out_txid2 in cm._outputs_cache
    assert len(cm._outputs_cache[out_txid2].keys()) == 2
    assert cm._outputs_cache[out_txid2][0][
        'status'] == CacheManager.SPENT | CacheManager.PROVISIONAL | CacheManager.UNCONFIRMED  # nopep8

    # Check that confirmed balances are 0 for the out_addrs
    out_a = [a[0] for a in out_addrs]
    conf_addr_balances = cm.get_balances(out_a)
    unconf_addr_balances = cm.get_balances(out_a, True)
    for addr, exp_bal in out_addrs:
        assert conf_addr_balances[addr] == 0
        assert unconf_addr_balances[addr] == exp_bal

    # Check utxos
    conf_addr_utxos = cm.get_utxos(out_a)
    unconf_addr_utxos = cm.get_utxos(out_a, True)
    for addr, exp_bal in out_addrs:
        assert addr not in conf_addr_utxos
        assert addr in unconf_addr_utxos
        assert len(unconf_addr_utxos[addr]) == 1
        utxo = unconf_addr_utxos[addr][0]
        assert utxo.value == exp_bal
        assert utxo.num_confirmations == 0

    # Re-insert as unconfirmed
    txn = WalletTransaction.from_hex(txn_hex)
    cm.insert_txn(txn, mark_provisional=False)

    # Only the statuses should change, so check those.
    assert cm._outputs_cache[txid][0][
        'status'] == CacheManager.UNSPENT | CacheManager.UNCONFIRMED
    assert cm._outputs_cache[txid][1][
        'status'] == CacheManager.UNSPENT | CacheManager.UNCONFIRMED
    assert cm._outputs_cache[out_txid1][0][
        'status'] == CacheManager.SPENT | CacheManager.UNCONFIRMED
    assert cm._outputs_cache[out_txid2][0][
        'status'] == CacheManager.SPENT | CacheManager.UNCONFIRMED

    # Re-insert as confirmed
    txn = WalletTransaction.from_hex(txn_hex)
    txn.block = 374445
    txn.block_hash = Hash(
        "000000000000000004c241778cbbc269e912df5fe8d856efaea916daa82d2575")
    txn.confirmations = 7781
    cm.insert_txn(txn, mark_provisional=False)

    # Only the statuses should change, so check those.
    assert cm._outputs_cache[txid][0]['status'] == CacheManager.UNSPENT
    assert cm._outputs_cache[txid][1]['status'] == CacheManager.UNSPENT
    assert cm._outputs_cache[out_txid1][0]['status'] == CacheManager.SPENT
    assert cm._outputs_cache[out_txid2][0]['status'] == CacheManager.SPENT

    # Check balances
    out_a = [a[0] for a in out_addrs]
    conf_addr_balances = cm.get_balances(out_a)
    unconf_addr_balances = cm.get_balances(out_a, True)
    for addr, exp_bal in out_addrs:
        assert conf_addr_balances[addr] == exp_bal
        assert unconf_addr_balances[addr] == exp_bal

    # Check utxos
    conf_addr_utxos = cm.get_utxos(out_a)
    unconf_addr_utxos = cm.get_utxos(out_a, True)
    for addr, exp_bal in out_addrs:
        assert addr in conf_addr_utxos
        assert addr in unconf_addr_utxos
        assert len(unconf_addr_utxos[addr]) == 1
        utxo = conf_addr_utxos[addr][0]
        assert utxo.value == exp_bal
        assert utxo.num_confirmations == 7781

    # Check utxos for all addresses that have deposits - we should only have 4
    addrs = [
        "1DpCouKa2evX3f2aELUy7iNdsrYuLLaqWy",
        "1GcmBmvYWJKLFHxrTtx5DqQLV7oHQAkH2c",
        "15hyvVXH2eJnakwhpqKBf5oTCa3o2bp8m8",
        "15qCydrcqURADXJHrtMW9m6SpPTa3kqkQb",
        "1EbnoKrmUEe3hsK9gTVfgYAming6BuqM3L",
        "1BbPtYsbBPFRCwnU5RuMTttraghXQ5JSZm",
        "1Ezv6YmYsZvALUaRcZRf8hBdxYni6cm78X",
        "16Mcvb7fYhif94d1RHCn5AE2dm1oXCGnH6",
        "12m3fcaabUgYwWcodgVZUGH6ntFqVrHk5C",
        "18VjAjZ7Au8U75LCHT7aH7mTwKETZwHTpi"
    ]
    conf_utxos = cm.get_utxos(addrs)
    assert len(conf_utxos) == 4
    utxo_addrs_values = [("18VjAjZ7Au8U75LCHT7aH7mTwKETZwHTpi", 23264),
                         ("12m3fcaabUgYwWcodgVZUGH6ntFqVrHk5C", 204154),
                         ("1EbnoKrmUEe3hsK9gTVfgYAming6BuqM3L", 1000000),
                         ("1BbPtYsbBPFRCwnU5RuMTttraghXQ5JSZm", 996600)]
    for a, value in utxo_addrs_values:
        assert a in conf_utxos
        assert len(conf_utxos[a]) == 1
        assert conf_utxos[a][0].value == value
    # These two were fully spent by the provisional->confirmed txn.
    assert "15hyvVXH2eJnakwhpqKBf5oTCa3o2bp8m8" not in conf_utxos
    assert "15qCydrcqURADXJHrtMW9m6SpPTa3kqkQb" not in conf_utxos

    # Now delete the last transaction
    cm._delete_txn(txid)
    assert txid not in cm._txn_cache
    assert txid not in cm._inputs_cache
    assert txid not in cm._outputs_cache
    for in_addr in in_addrs:
        assert in_addr not in cm._spends_for_addr
    for out_addr, _ in out_addrs:
        assert out_addr not in cm._deposits_for_addr
    # Deleting a spender reverts the consumed outputs back to UNSPENT.
    for out_txid, index in [(out_txid1, 0), (out_txid2, 0)]:
        out = cm._outputs_cache[out_txid][index]
        assert out['status'] == CacheManager.UNSPENT
        assert out['spend_txid'] is None
        assert out['spend_index'] is None
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction.

    Args:
        txn_json: dict in the bitcoind/insight "verbose" transaction
            format, e.g.:
            {
                "txid": "0bf0de38c26195919179f...",
                "locktime": 0,
                "vin": [
                    {
                        "txid": "b84a66c46e24fe71f9...",
                        "vout": 0,
                        "n": 0,
                        "scriptSig": {
                            "hex": "00493046022100de7b..."
                        },
                        "sequence": 4294967295,
                        "addr": "3L7dKYQGNoZub928CJ8NC2WfrM8U8GGBjr"
                    }
                ],
                "vout": [
                    {
                        "n": 0,
                        "value": 0.0029,
                        "scriptPubKey": {
                            "hex": "76a914c629680b8d13...",
                            "addresses": [
                                "1K4nPxBMy6sv7jssTvDLJWk1ADHBZEoUVb"
                            ]
                        }
                    }
                ]
            }

    Returns:
        tuple (two1.bitcoin.Transaction, set): the deserialized
            transaction and the set of addresses seen in its inputs
            and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    # Sort by "n" so input/output order matches the serialized txn.
    for i in sorted(txn_json["vin"], key=lambda i: i["n"]):
        if 'coinbase' in i:
            # NOTE(review): height is hard-coded to 0 here (the verbose
            # format carries no height on the input) — confirm callers
            # don't rely on the real height.
            inputs.append(
                CoinbaseInput(height=0,
                              raw_script=bytes.fromhex(i['coinbase']),
                              sequence=i['sequence'],
                              block_version=1))
        else:
            script = Script.from_hex(i["scriptSig"]["hex"])
            inputs.append(
                TransactionInput(Hash(i["txid"]),
                                 i["vout"],
                                 script,
                                 i["sequence"]))
        if "addr" in i:
            addr_keys.add(i["addr"])

    for o in sorted(txn_json["vout"], key=lambda o: o["n"]):
        script = Script.from_hex(o["scriptPubKey"]["hex"])
        # "value" is in BTC; convert to satoshis via Decimal to avoid
        # float rounding errors.
        value = int(
            decimal.Decimal(str(o["value"])) * decimal.Decimal('1e8'))
        outputs.append(TransactionOutput(value, script))
        if "addresses" in o["scriptPubKey"]:
            for a in o["scriptPubKey"]["addresses"]:
                addr_keys.add(a)

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["locktime"])

    # Sanity check: the rebuilt txn must hash to the advertised txid.
    assert txn.hash == Hash(txn_json['txid'])

    return txn, addr_keys