def from_bytes(b):
    """ Creates a BlockHeader object from a serialized bytestream.

    This function "eats" the bytestream and returns the remainder of the
    stream after deserializing the fields of the BlockHeader.

    Args:
        b (bytes): bytes beginning with the (4-byte) version.

    Returns:
        bh, b (tuple): A tuple containing two elements - a BlockHeader object
        and the remainder of the bytestream after deserialization.
    """
    # Fields are consumed in wire order: version, prev hash, merkle root,
    # time, bits, nonce.
    version, b = unpack_u32(b)
    # The two hashes are fixed-width 32-byte fields; slice them off directly.
    prev_block_hash, b = Hash(b[0:32]), b[32:]
    merkle_root_hash, b = Hash(b[0:32]), b[32:]
    time, b = unpack_u32(b)
    bits, b = unpack_u32(b)
    nonce, b = unpack_u32(b)
    return (
        BlockHeader(version,
                    prev_block_hash,
                    merkle_root_hash,
                    time,
                    bits,
                    nonce),
        b
    )
def hash(self):
    """ Returns the double SHA-256 hash over this object's serialization.

    Returns:
        Hash: object containing the hash
    """
    serialized = bytes(self)
    return Hash.dhash(serialized)
def _op_hash256(self):
    """ Pops the top stack item and pushes its double SHA-256 hash. """
    self._check_stack_len(1)
    top = self._stack.pop()
    hashed = bytes(Hash.dhash(top))
    self._stack.append(hashed)
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    ret = {}
    for txid in ids:
        # One request per TXID; includeHex asks the API for the raw
        # serialized transaction as well.
        r = self._request("GET", "/txs/%s" % txid, True,
                          params={
                              "includeHex": "true",
                              "limit": 999999999
                          })
        data = r.json()
        # Unconfirmed transactions carry no block hash.
        block_hash = None
        if "block_hash" in data:
            block_hash = Hash(data['block_hash'])
        metadata = dict(block=data['block_height'],
                        block_hash=block_hash,
                        network_time=timegm(
                            arrow.get(
                                data['received']).datetime.timetuple()),
                        confirmations=data['confirmations'])
        txn, _ = self.txn_from_json(data)
        # Sanity check: the deserialized transaction must hash back to
        # the TXID we requested.
        assert str(txn.hash) == txid
        ret[txid] = dict(metadata=metadata, transaction=txn)
    return ret
def hash(self):
    """ Double SHA-256 hash of this transaction's serialization.

    Returns:
        dhash (bytes): Double SHA-256 hash of the serialized transaction.
    """
    raw = bytes(self)
    return Hash.dhash(raw)
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects. TXIDs for
        which the API returned a non-200 status are silently omitted.
    """
    ret = {}
    for txid in ids:
        r = self._request("GET", "transactions/%s" % txid)
        data = r.json()
        # Only process successful lookups; anything else is skipped.
        if r.status_code == 200:
            # Unconfirmed transactions carry no block hash.
            block_hash = None
            if data['block_hash']:
                block_hash = Hash(data['block_hash'])
            metadata = dict(
                block=data['block_height'],
                block_hash=block_hash,
                network_time=timegm(
                    arrow.get(
                        data['chain_received_at']).datetime.timetuple()),
                confirmations=data['confirmations'])
            txn, _ = self.txn_from_json(data)
            # Sanity check: deserialized txn must hash back to the TXID.
            assert str(txn.hash) == txid
            ret[txid] = dict(metadata=metadata, transaction=txn)
    return ret
def run(self):
    """ CPU-mines shares for the most recent pool work notification.

    Iterates the full enonce2 space; for each enonce2 builds the
    coinbase transaction and a CompactBlock, then scans the full
    32-bit nonce space, reporting any share whose header hash beats
    the pool target back to the event loop.
    """
    self.logger.info("starting to mine")
    pool_target = utils.bits_to_target(self.notify_msg.bits_pool)
    for enonce2_num in range(0, 2 ** (self.enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(self.enonce2_size, byteorder="big")
        # Coinbase = coinb1 || enonce1 || enonce2 || coinb2
        cb_txn, _ = Transaction.from_bytes(
            self.notify_msg.coinb1 + self.enonce1 + enonce2 +
            self.notify_msg.coinb2)
        cb = CompactBlock(self.notify_msg.height,
                          self.notify_msg.version,
                          Hash(self.notify_msg.prev_block_hash),
                          self.notify_msg.ntime,
                          self.notify_msg.nbits,  # lower difficulty work for testing
                          self.notify_msg.merkle_edge,
                          cb_txn)
        # Was range(0xffffffff), which skipped the final nonce value
        # 0xffffffff; the nonce space is 2**32 values.
        for nonce in range(0x100000000):
            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                self.logger.info("Found Share")
                share = Share(enonce2=enonce2,
                              nonce=nonce,
                              work_id=self.notify_msg.work_id,
                              otime=self.notify_msg.ntime)
                # asyncio.async() was removed ('async' became a keyword
                # in Python 3.7); ensure_future is the supported
                # replacement. This is scheduled thread-safely because
                # run() executes off the event-loop thread.
                self.event_loop.call_soon_threadsafe(
                    asyncio.ensure_future, self.handle_found_cb(share))
                time.sleep(0.3)
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the entire
            blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
        Transaction objects.
    """
    # Block height of the chain tip; used to derive the block height of
    # each txn from its confirmation count.
    last_block_index = self.get_block_height()
    ret = defaultdict(list)
    total_items = limit
    # The API limits the number of addresses per request; chunk them.
    for addresses in self._list_chunks(address_list, 199):
        fr = 0
        to = min(100, limit)
        # Page through results 100 at a time using the from/to window
        # the API echoes back.
        while fr < total_items:
            req = "addrs/" + ",".join(addresses) + \
                "/txs?from=%d&to=%d" % (fr, to)
            r = self._request("GET", req)
            txn_data = r.json()
            if "totalItems" in txn_data:
                total_items = txn_data["totalItems"]
            fr = txn_data["to"]
            to = fr + 100
            for data in txn_data['items']:
                # Skip entries without standard input/output arrays.
                if "vin" not in data or "vout" not in data:
                    continue
                block_hash = None
                block = None
                if data['confirmations'] > 0:
                    # Derive the containing block height from the tip
                    # height and the confirmation count.
                    block = last_block_index - data['confirmations'] + 1
                    block_hash = Hash(data['blockhash'])
                metadata = dict(block=block,
                                block_hash=block_hash,
                                network_time=data.get("time", None),
                                confirmations=data['confirmations'])
                # min_block filtering happens client-side, after fetch.
                if min_block and block:
                    if block < min_block:
                        continue
                txn, addr_keys = self.txn_from_json(data)
                for addr in addr_keys:
                    if addr in addresses:
                        ret[addr].append(dict(metadata=metadata,
                                              transaction=txn))
    return ret
def set_txn_side_effect_for_hd_discovery(self):
    """ Builds the ordered list of mock get_transactions() responses
    needed for one full HD account-discovery pass and installs it as
    the mock's side_effect.

    Returns:
        int: the number of mocked responses installed.
    """
    # For each used account, there are at least 2 calls required:
    # 1 for the first DISCOVERY_INCREMENT payout addresses and 1
    # for the first DISCOVERY_INCREMENT change
    # addresses. Depending on the number of used addresses for the
    # account, this will change.
    effects = []

    # Discovery always probes at least one account, even if none are used.
    n = self._num_used_accounts
    if n == 0:
        n = 1

    for acct_num in range(n):
        # change == 0 -> payout chain, change == 1 -> change chain.
        for change in [0, 1]:
            num_used = self._num_used_addresses[acct_num][change]
            # Number of discovery windows needed to cover the used
            # addresses plus the gap limit.
            r = math.ceil((num_used + HDAccount.GAP_LIMIT) /
                          self.address_increment)
            k = 'change_addresses' if change else 'payout_addresses'
            addr_list = self._acct_keys[acct_num][k]
            if change:
                # Confirmed-looking metadata; offset by r so each chain
                # gets slightly different values.
                metadata = dict(block=234790 + r,
                                block_hash=Hash(
                                    "000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"
                                ),
                                confirmations=23890 - r)
            else:
                # Unconfirmed-looking metadata for the payout chain.
                metadata = dict(block=None,
                                block_hash=None,
                                confirmations=0)

            # Always at least one window per chain.
            if r == 0:
                r = 1

            for i in range(r):
                start = i * self.address_increment
                end = (i + 1) * self.address_increment
                addr_range = range(start, end)
                # NOTE(review): addr_list is indexed by the window
                # number i, not by an address index within the window —
                # confirm this is the intended fixture behavior.
                out = TransactionOutput(value=10000,
                                        script=Script.build_p2pkh(
                                            address_to_key_hash(
                                                addr_list[i])[1]))
                dummy_txn = Transaction(1, [], [out], 0)

                m = MockTxnDict(num_used=num_used,
                                addr_range=addr_range,
                                addr_list=addr_list,
                                used_value=[
                                    dict(metadata=metadata,
                                         transaction=dummy_txn)
                                ],
                                unused_value=[])
                effects.append(m)

    self.get_transactions.side_effect = effects

    return len(effects)
def _complete_merkle_edge(self):
    """ Folds the coinbase txn hash up the supplied merkle edge and
    stores the resulting root in the block header.
    """
    if self._cb_txn is None:
        # TODO: raise an error?
        return

    running = self._cb_txn.hash
    for edge_hash in self.merkle_edge:
        running = Hash.dhash(bytes(running) + bytes(edge_hash))

    self.block_header.merkle_root_hash = running
class CoinbaseInput(TransactionInput):
    """ See https://bitcoin.org/en/developer-reference#coinbase

    Args:
        height (uint): The height of the block coinbase is part of
            will go into. Not required for version 1
            blocks.
        raw_script (bytes): the bytes of the coinbase script. For
            block_version > 1 the height portion should NOT be included
            in this script.
        sequence (int): Unless you are Satoshi with a version 1 block,
            the default is fine. If you are Satoshi, send me some of
            your private keys and set this to 0.
        block_version (int): The version of the block this coinbase is
            a part of or will go into. If raw_script already contains
            the height of the block, this must be 1.
    """
    # Coinbase inputs spend a null outpoint: 32 zero bytes.
    NULL_OUTPOINT = Hash(bytes(32))
    MAX_INT = 0xffffffff

    def __init__(self, height, raw_script, sequence=MAX_INT,
                 block_version=3):
        self.height = height
        if block_version == 1:
            scr = raw_script
        else:
            # BIP-34: blocks v2+ must start the coinbase script with the
            # serialized block height.
            scr = Script.build_push_int(self.height) + raw_script

        # Coinbase scripts are basically whatever, so we don't
        # try to create a script object from them.
        super().__init__(self.NULL_OUTPOINT,
                         self.MAX_INT,
                         scr,
                         sequence)

    def __str__(self):
        """ Returns a human readable formatting of this input.

        Returns:
            s (str): A string containing the human readable input.
        """
        return (
            "CoinbaseInput(" +
            "Outpoint: %s " % (self.outpoint) +
            "Outpoint Index: 0x%08x " % (self.outpoint_index) +
            "Script: %s " % (bytes_to_str(self.script)) +
            "Sequence: 0x%08x)" % (self.sequence_num))

    def __bytes__(self):
        """ Serializes the object into a byte stream.

        Returns:
            b (bytes): byte stream containing the serialized
            coinbase input.
        """
        return (
            bytes(self.outpoint) +
            pack_u32(self.outpoint_index) +
            pack_var_str(self.script) +
            pack_u32(self.sequence_num))
def __init__(self, height, version, prev_block_hash, time, bits, nonce,
             txns):
    # The real merkle root is unknown until the tree over txns is
    # computed, so a zeroed placeholder goes into the header and is
    # corrected by invalidate() below.
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    Hash(bytes(32)),  # Fake merkle_root for now
                                    time,
                                    bits,
                                    nonce)  # Fake nonce also

    self.height = height
    self.txns = txns

    self.merkle_tree = None
    # Builds the merkle tree and patches the header's merkle root.
    self.invalidate()
def mine_work(work_msg, enonce1, enonce2_size):
    """ Mines the work using a CPU to find a valid solution

    Loops until the CPU finds a valid solution of the given work.

    Todo:
        slow down the click echo when on a 21BC

    Args:
        work_msg (WorkNotification): the work given by the pool API
        enonce1 (bytes): extra nonce required to make the coinbase
            transaction
        enonce2_size (int): size of the extra nonce 2 in bytes
    """
    pool_target = utils.bits_to_target(work_msg.bits_pool)
    for enonce2_num in range(0, 2 ** (enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(enonce2_size, byteorder="big")

        # Coinbase = coinb1 || enonce1 || enonce2 || coinb2
        cb_txn, _ = Transaction.from_bytes(
            work_msg.coinb1 + enonce1 + enonce2 + work_msg.coinb2)
        cb = CompactBlock(work_msg.height,
                          work_msg.version,
                          Hash(work_msg.prev_block_hash),
                          work_msg.ntime,
                          work_msg.nbits,  # lower difficulty work for testing
                          work_msg.merkle_edge,
                          cb_txn)

        row_counter = 0
        # Was range(0xffffffff), which skipped the final nonce value
        # 0xffffffff; the nonce space is 2**32 values.
        for nonce in range(0x100000000):
            # Progress tick every 6000 nonces (integer modulus; the
            # original used the float literal 6e3).
            if nonce % 6000 == 0:
                click.echo(click.style(u'█', fg='green'), nl=False)
                row_counter += 1
            if row_counter > 40:
                row_counter = 0
                click.echo("")

            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                share = Share(
                    enonce2=enonce2,
                    nonce=nonce,
                    work_id=work_msg.work_id,
                    otime=int(time.time()))
                # adds a new line at the end of progress bar
                click.echo("")
                return share

        # NOTE(review): message says "enonce1 space" but it is the nonce
        # space for this enonce2 that was exhausted — confirm intent
        # before changing user-facing text.
        click.echo("Exhausted enonce1 space. Changing enonce2")
def __init__(self, height, version, prev_block_hash, time, bits,
             merkle_edge, cb_txn=None):
    # The merkle root and nonce are unknown at construction time, so
    # zeroed placeholders go into the header.
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    Hash(bytes(32)),  # Fake merkle_root for now
                                    time,
                                    bits,
                                    0)  # Fake nonce also
    self.height = height
    self.merkle_edge = merkle_edge

    if cb_txn is not None:
        # NOTE(review): assignment goes through the
        # `coinbase_transaction` property setter (defined elsewhere in
        # the class), which presumably completes the merkle edge —
        # confirm against the property definition.
        self.coinbase_transaction = cb_txn
    else:
        self._cb_txn = None
def set_txn_side_effect_for_index(self, account_index, address_index,
                                  change):
    """ Installs a single mocked get_transactions() response covering
    exactly one address index of the given account/chain.
    """
    metadata = dict(
        block_height=234790,
        block_hash=Hash("000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"),
        confirmations=23890)
    dummy_txn = Transaction(1, [], [], 0)

    # change selects between the two address chains of the account.
    key = 'change_addresses' if change else 'payout_addresses'
    addresses = self._acct_keys[account_index][key]

    mock_response = MockTxnDict(
        num_used=address_index + 1,
        addr_range=range(address_index, address_index + 1),
        addr_list=addresses,
        used_value=[dict(metadata=metadata, transaction=dummy_txn)],
        unused_value=[])

    self.get_transactions.side_effect = [mock_response]
def from_bytes(b):
    """ Deserializes a byte stream into a TransactionInput.

    Args:
        b (bytes): byte stream starting with the outpoint.

    Returns:
        tuple: First element of the tuple is the TransactionInput
               object and the second is the remaining byte stream.
    """
    # The outpoint hash is a fixed 32-byte prefix; the rest is parsed
    # field by field.
    outpoint_bytes, rest = b[:32], b[32:]
    outpoint_index, rest = unpack_u32(rest)
    script, rest = Script.from_bytes(rest)
    sequence_num, rest = unpack_u32(rest)

    txn_input = TransactionInput(Hash(outpoint_bytes),
                                 outpoint_index,
                                 script,
                                 sequence_num)
    return txn_input, rest
def get_utxos(self, addresses, include_unconfirmed=False):
    """ Returns a dict containing the UTXOs for the desired addresses

    Args:
        addresses (list): List of addresses to get balances for
        include_unconfirmed (bool): True if unconfirmed transactions
            should be included in the balance.

    Returns:
        dict: Keys are addresses, values are lists of
            UnspentTransactionOutput objects for the address.

    Raises:
        Exception: if a deposit refers to an output not present in the
            outputs cache.
    """
    # Mask matching any output that is unspent but not fully confirmed.
    unconfirmed_mask = self.UNSPENT | self.UNCONFIRMED | self.PROVISIONAL
    rv = {}
    for addr in addresses:
        # Get the list of unspent deposits.
        if addr not in self._deposits_for_addr:
            continue
        for txid in self._deposits_for_addr[addr]:
            for i in self._deposits_for_addr[addr][txid]:
                # Look up the status in the outputs cache
                if txid not in self._outputs_cache or \
                   i not in self._outputs_cache[txid]:
                    raise Exception("Don't have information for %r:%r" %
                                    (txid, i))

                status = self._outputs_cache[txid][i]['status']
                if status & self.UNSPENT:
                    # Either the caller wants unconfirmed outputs too,
                    # or the output must be exactly UNSPENT (confirmed).
                    if (status & unconfirmed_mask and
                        include_unconfirmed) or \
                       (status == self.UNSPENT and
                            not include_unconfirmed):
                        out = self._outputs_cache[txid][i]['output']
                        utxo = UnspentTransactionOutput(
                            transaction_hash=Hash(txid),
                            outpoint_index=i,
                            value=out.value,
                            scr=out.script,
                            confirmations=self._txn_cache[txid].
                            confirmations)
                        if addr not in rv:
                            rv[addr] = []
                        rv[addr].append(utxo)

    return rv
def _invalidate_coinbase(self, merkle_node=None):
    """ Recomputes the merkle hashes along the left spine of the tree
    after the coinbase transaction changed.

    The coinbase is the leftmost leaf, so only left children are
    descended; each level's hash is rebuilt on the way back up, and the
    block header's merkle root is updated at the top.

    Args:
        merkle_node: node to recurse from; None starts at the root.
    """
    if merkle_node is None:
        merkle_node = self.merkle_tree

    if(merkle_node.left_child is None and
       merkle_node.right_child is None):
        # This is the node corresponding to the coinbase, update hash
        merkle_node.hash = self.coinbase_transaction.hash
        return
    else:
        # Recurse down the left spine, then recombine with the (still
        # valid) right subtree hash.
        self._invalidate_coinbase(merkle_node.left_child)
        merkle_node.hash = Hash.dhash(
            bytes(merkle_node.left_child.hash) +
            bytes(merkle_node.right_child.hash))

    # If we're back at the root, update the blockheader
    if merkle_node is self.merkle_tree:
        self.block_header.merkle_root_hash = self.merkle_tree.hash
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in
        address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
        Transaction objects.
    """
    ret = defaultdict(list)
    # The API limits the number of addresses per request; chunk them.
    for addresses in self._list_chunks(address_list, 199):
        path = "addresses/" + ",".join(addresses) \
               + "/transactions?limit={}".format(limit)
        # min_block filtering is done server-side via query parameter.
        if min_block:
            path += "&min_block={}".format(min_block)

        r = self._request("GET", path)
        txn_data = r.json()

        for data in txn_data:
            # Unconfirmed transactions carry no block hash.
            block_hash = None
            if data['block_hash']:
                block_hash = Hash(data['block_hash'])
            metadata = dict(
                block=data['block_height'],
                block_hash=block_hash,
                network_time=timegm(
                    arrow.get(
                        data['chain_received_at']).datetime.timetuple()),
                confirmations=data['confirmations'])

            txn, addr_keys = self.txn_from_json(data)
            # Attribute the txn only to addresses in the current chunk.
            for addr in addr_keys:
                if addr in addresses:
                    ret[addr].append(
                        dict(metadata=metadata, transaction=txn))

    return ret
def _deserialize(wt_dict):
    """ Rebuilds a WalletTransaction from its serialized dict form.

    Metadata is read from the 'metadata' sub-dict when present,
    otherwise from the top-level dict itself.
    """
    # Private, only for internal wallet usage
    wt = WalletTransaction.from_hex(wt_dict['transaction'])

    m = wt_dict['metadata'] if 'metadata' in wt_dict else wt_dict

    wt.block = m['block']
    raw_block_hash = m['block_hash']
    if raw_block_hash is not None:
        wt.block_hash = Hash(raw_block_hash)
    wt.confirmations = m['confirmations']
    wt.network_time = m['network_time']

    # These fields are optional in older serializations.
    for optional_key in ('value', 'fees', 'provisional'):
        if optional_key in m:
            setattr(wt, optional_key, m[optional_key])

    return wt
def get_signature_for_input(self, input_index, hash_type, private_key,
                            sub_script):
    """ Returns the signature for an input.

    This function only returns the signature for an input, it
    does not insert the signature into the script member of
    the input. It also does not validate that the given private key
    matches any public keys in the sub_script.

    Args:
        input_index (int): The index of the input to sign.
        hash_type (int): What kind of signature hash to do.
        private_key (crypto.PrivateKey): private key with which
            to sign the transaction.
        sub_script (Script): the scriptPubKey of the corresponding
            utxo being spent if the outpoint is P2PKH or the redeem
            script if the outpoint is P2SH.

    Returns:
        tuple: A tuple containing the signature object and the message
            that was signed.

    Raises:
        ValueError: if input_index is out of range.
    """
    if input_index < 0 or input_index >= len(self.inputs):
        raise ValueError("Invalid input index.")

    # OP_CODESEPARATORs must be stripped before hashing per protocol.
    tmp_script = sub_script.remove_op("OP_CODESEPARATOR")
    if hash_type & 0x1f == self.SIG_HASH_SINGLE and len(self.inputs) > len(
            self.outputs):
        # This is to deal with the bug where specifying an index
        # that is out of range (wrt outputs) results in a
        # signature hash of 0x1 (little-endian)
        msg_to_sign = 0x1.to_bytes(32, 'little')
    else:
        # The sighash is the double SHA-256 of the modified txn
        # serialization followed by the 4-byte hash type.
        txn_copy = self._copy_for_sig(input_index,
                                      hash_type,
                                      tmp_script)
        msg_to_sign = bytes(
            Hash.dhash(bytes(txn_copy) + pack_u32(hash_type)))

    sig = private_key.sign(msg_to_sign, False)

    return sig, msg_to_sign
def _compute_merkle_tree(self):
    """ Builds the merkle tree over self.txns, bottom-up.

    On completion self.merkle_tree holds the root node; levels with an
    odd node count duplicate their last node, per the bitcoin merkle
    rule.
    """
    # Start from the leaves: one node per transaction hash.
    level = [MerkleNode(t.hash, None, None) for t in self.txns]

    while True:
        if len(level) == 1:
            # Single node left: that is the root.
            self.merkle_tree = level[0]
            return
        if len(level) % 2 != 0:
            # Duplicate the last node to make the level even.
            level.append(level[-1])

        parents = []
        for idx in range(0, len(level), 2):
            lchild = level[idx]
            rchild = level[idx + 1]
            parent_hash = Hash.dhash(bytes(lchild.hash) +
                                     bytes(rchild.hash))
            parents.append(MerkleNode(parent_hash, lchild, rchild))
        level = parents
def get_signature_for_input(self, input_index, hash_type, private_key,
                            sub_script):
    """ Computes and returns the signature for a single input.

    Only the signature is produced; nothing is inserted into the
    input's script, and no check is made that private_key matches any
    public key in sub_script.

    Args:
        input_index (int): The index of the input to sign.
        hash_type (int): What kind of signature hash to do.
        private_key (crypto.PrivateKey): private key with which to sign
            the transaction.
        sub_script (Script): the scriptPubKey of the corresponding utxo
            being spent if the outpoint is P2PKH or the redeem script
            if the outpoint is P2SH.

    Returns:
        tuple: A tuple containing the signature object and the message
            that was signed.
    """
    if not (0 <= input_index < len(self.inputs)):
        raise ValueError("Invalid input index.")

    stripped_script = sub_script.remove_op("OP_CODESEPARATOR")

    is_single = (hash_type & 0x1f) == self.SIG_HASH_SINGLE
    if is_single and len(self.inputs) > len(self.outputs):
        # This is to deal with the bug where specifying an index
        # that is out of range (wrt outputs) results in a
        # signature hash of 0x1 (little-endian)
        msg_to_sign = 0x1.to_bytes(32, 'little')
    else:
        txn_copy = self._copy_for_sig(input_index,
                                      hash_type,
                                      stripped_script)
        msg_to_sign = bytes(
            Hash.dhash(bytes(txn_copy) + pack_u32(hash_type)))

    signature = private_key.sign(msg_to_sign, False)
    return signature, msg_to_sign
def mine_work(work_msg, enonce1, enonce2_size):
    """ CPU-mines the given pool work until a share is found.

    Args:
        work_msg: the work notification given by the pool API.
        enonce1 (bytes): extra nonce required to make the coinbase
            transaction.
        enonce2_size (int): size of the extra nonce 2 in bytes.

    Returns:
        Share: the found share, or None if the entire enonce2/nonce
        space is exhausted without one.
    """
    pool_target = utils.bits_to_target(work_msg.bits_pool)
    for enonce2_num in range(0, 2**(enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(enonce2_size, byteorder="big")

        # Coinbase = coinb1 || enonce1 || enonce2 || coinb2
        cb_txn, _ = Transaction.from_bytes(work_msg.coinb1 + enonce1 +
                                           enonce2 + work_msg.coinb2)
        cb = CompactBlock(work_msg.height,
                          work_msg.version,
                          Hash(work_msg.prev_block_hash),
                          work_msg.ntime,
                          work_msg.nbits,  # lower difficulty work for testing
                          work_msg.merkle_edge,
                          cb_txn)

        row_counter = 0
        # Was range(0xffffffff), which skipped the final nonce value
        # 0xffffffff; the nonce space is 2**32 values.
        for nonce in range(0x100000000):
            # Progress tick every 6000 nonces (integer modulus; the
            # original used the float literal 6e3).
            if nonce % 6000 == 0:
                click.echo(click.style(u'█', fg='green'), nl=False)
                row_counter += 1
            if row_counter > 40:
                row_counter = 0
                click.echo("")

            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                share = Share(enonce2=enonce2,
                              nonce=nonce,
                              work_id=work_msg.work_id,
                              otime=int(time.time()))
                # adds a new line at the end of progress bar
                click.echo("")
                return share

        # NOTE(review): message says "enonce1 space" but it is the
        # nonce space for this enonce2 that was exhausted — confirm
        # intent before changing user-facing text.
        click.echo("Exhausted enonce1 space. Changing enonce2")
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects. TXIDs whose
        lookups fail (non-200) or lack vin/vout are omitted.
    """
    # Block height of the chain tip; used to derive each txn's block
    # height from its confirmation count.
    last_block_index = self.get_block_height()
    ret = {}
    for txid in ids:
        r = self._request("GET", "tx/%s" % txid)
        data = r.json()

        if r.status_code == 200:
            # Skip entries without standard input/output arrays.
            if "vin" not in data or "vout" not in data:
                continue

            block_hash = None
            block = None
            if data['confirmations'] > 0:
                # Derive the containing block height from the tip
                # height and the confirmation count.
                block = last_block_index - data['confirmations'] + 1
                block_hash = Hash(data['blockhash'])
            metadata = dict(block=block,
                            block_hash=block_hash,
                            network_time=data['time'],
                            confirmations=data['confirmations'])
            txn, _ = self.txn_from_json(data)
            # Sanity check: deserialized txn must hash back to the TXID.
            assert str(txn.hash) == txid

            ret[txid] = dict(metadata=metadata, transaction=txn)

    return ret
def txn_from_json(txn_json):
    """ Builds a Transaction from a BlockCypher-style JSON object.

    Args:
        txn_json: Json with the following format:
            {
            "block_hash": "0000000000000000af64802c79...",
            "block_height": 292586,
            "hash": "b4735a0690dab16b8789fceaf81c511f...",
            "addresses": [
                "18KXZzuC3xvz6upUMQpsZzXrBwNPWZjdSa",
                "1AAuRETEcHDqL4VM3R97aZHP8DSUHxpkFV",
                "1DEP8i3QJCsomS4BSMY2RpU1upv62aGvhD",
                "1VxsEDjo6ZLMT99dpcLu4RQonMDVEQQTG"
            ],
            "total": 3537488,
            "fees": 20000,
            "size": 438,
            "preference": "medium",
            "relayed_by": "",
            "confirmed": "2014-03-26T17:08:04Z",
            "received": "2014-03-26T17:08:04Z",
            "ver": 1,
            "lock_time": 0,
            "double_spend": false,
            "vin_sz": 2,
            "vout_sz": 2,
            "confirmations": 64492,
            "confidence": 1,
            "inputs": [
                {
                "prev_hash": "729f6469b59fea5da7...",
                "output_index": 0,
                "script": "483045022100d06cdad1a...",
                "output_value": 3500000,
                "sequence": 4294967295,
                "addresses": [
                    "1VxsEDjo6ZLMT99dpcLu4RQonMDVEQQTG"
                ],
                "script_type": "pay-to-pubkey-hash"
                },
                ...
            ],
            "outputs": [
                {
                "value": 3500000,
                "script": "76a9148629647bd642a237...",
                "addresses": [
                    "1DEP8i3QJCsomS4BSMY2RpU1upv62aGvhD"
                ],
                "script_type": "pay-to-pubkey-hash"
                }
            ]...

    Returns:
        tuple: (Transaction, set) — the deserialized transaction and
        the set of addresses seen in its inputs and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    for i in txn_json["inputs"]:
        # Chain doesn't return the stuff about script length etc, so
        # we need to prepend that.
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script"])))
        inputs.append(TransactionInput(Hash(i["prev_hash"]),
                                       i["output_index"],
                                       script,
                                       i["sequence"]))
        # Track the first address of each input, when present.
        if "addresses" in i and i["addresses"]:
            addr_keys.add(i["addresses"][0])

    for i in txn_json["outputs"]:
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script"])))
        outputs.append(TransactionOutput(i["value"],
                                         script))
        if "addresses" in i and i["addresses"]:
            addr_keys.add(i["addresses"][0])

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["lock_time"])

    return txn, addr_keys
def sign_input(self, input_index, hash_type, private_key, sub_script):
    """ Signs an input.

    Args:
        input_index (int): The index of the input to sign.
        hash_type (int): What kind of signature hash to do.
        private_key (crypto.PrivateKey): private key with which
            to sign the transaction.
        sub_script (Script): the scriptPubKey of the corresponding
            utxo being spent if the outpoint is P2PKH or the redeem
            script if the outpoint is P2SH.

    Returns:
        bool: True on success.

    Raises:
        ValueError: if input_index is out of range, the key doesn't
            match the redeem script / sub_script address.
        TypeError: if sub_script is neither multisig-redeem nor P2PKH.
    """
    if input_index < 0 or input_index >= len(self.inputs):
        raise ValueError("Invalid input index.")

    inp = self.inputs[input_index]
    curr_script_sig = inp.script
    multisig = False
    multisig_params = None
    multisig_key_index = -1

    # Only multisig redeem scripts and P2PKH are supported.
    if sub_script.is_multisig_redeem():
        multisig = True
        multisig_params = sub_script.extract_multisig_redeem_info()
    elif not sub_script.is_p2pkh():
        raise TypeError("Signing arbitrary redeem scripts is not currently supported.")

    # OP_CODESEPARATORs must be stripped before hashing per protocol.
    tmp_script = sub_script.remove_op("OP_CODESEPARATOR")

    compressed = False
    if hash_type & 0x1f == self.SIG_HASH_SINGLE and len(self.inputs) > len(self.outputs):
        # This is to deal with the bug where specifying an index
        # that is out of range (wrt outputs) results in a
        # signature hash of 0x1 (little-endian)
        msg_to_sign = 0x1.to_bytes(32, 'little')
    else:
        txn_copy = self._copy_for_sig(input_index,
                                      hash_type,
                                      tmp_script)

        if multisig:
            # Determine which of the public keys this private key
            # corresponds to.
            public_keys = multisig_params['public_keys']
            pub_key_full = self._get_public_key_bytes(private_key, False)
            pub_key_comp = self._get_public_key_bytes(private_key, True)
            for i, p in enumerate(public_keys):
                if pub_key_full == p or pub_key_comp == p:
                    multisig_key_index = i
                    break

            if multisig_key_index == -1:
                raise ValueError(
                    "Public key derived from private key does not match any of the public keys in redeem script.")
        else:
            # Before signing we should verify that the address in the
            # sub_script corresponds to that of the private key
            script_pub_key_h160_hex = tmp_script.get_hash160()
            if script_pub_key_h160_hex is None:
                raise ValueError("Couldn't find public key hash in sub_script!")

            # first try uncompressed key
            h160 = None
            for compressed in [True, False]:
                h160 = private_key.public_key.hash160(compressed)
                if h160 != bytes.fromhex(script_pub_key_h160_hex[2:]):
                    h160 = None
                else:
                    break

            if h160 is None:
                raise ValueError("Address derived from private key does not match sub_script!")

        # The sighash is the double SHA-256 of the modified txn
        # serialization followed by the 4-byte hash type.
        msg_to_sign = bytes(Hash.dhash(bytes(txn_copy) +
                                       pack_u32(hash_type)))

    sig = private_key.sign(msg_to_sign, False)

    if multisig:
        # For multisig, we need to determine if there are already
        # signatures and if so, where we insert this signature
        inp.script = self._do_multisig_script(
            [dict(index=multisig_key_index, signature=sig)],
            msg_to_sign,
            curr_script_sig,
            tmp_script,
            hash_type)
    else:
        # scriptSig = <sig || hash_type> <pubkey>, each length-prefixed.
        pub_key_bytes = self._get_public_key_bytes(private_key, compressed)
        pub_key_str = pack_var_str(pub_key_bytes)
        script_sig = pack_var_str(
            sig.to_der() + pack_compact_int(hash_type)) + pub_key_str
        inp.script = Script(script_sig)

    return True
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction

    Args:
        txn_json: JSON with the following format:
            {
            "hash": "0bf0de38c26195919179f...",
            "block_hash": "000000000000000...",
            "block_height": 303404,
            "block_time": "2014-05-30T23:54:55Z",
            "chain_received_at": "2015-08-13T10:52:21.718Z",
            "confirmations": 69389,
            "lock_time": 0,
            "inputs": [
              {
                "transaction_hash": "0bf0de38c2619...",
                "output_hash": "b84a66c46e24fe71f9...",
                "output_index": 0,
                "value": 300000,
                "addresses": [
                  "3L7dKYQGNoZub928CJ8NC2WfrM8U8GGBjr"
                ],
                "script_signature": "03046022100de7b67b9...",
                "script_signature_hex": "00493046022100de7b...",
                "sequence": 4294967295
              }
            ],
            "outputs": [
              {
                "transaction_hash": "0bf0de38c261959...",
                "output_index": 0,
                "value": 290000,
                "addresses": [
                  "1K4nPxBMy6sv7jssTvDLJWk1ADHBZEoUVb"
                ],
                "script": "OP_DUP OP_HASH160 c629680b8d...",
                "script_hex": "76a914c629680b8d13...",
                "script_type": "pubkeyhash",
                "required_signatures": 1,
                "spent": false,
                "spending_transaction": null
              }
            ],
            "fees": 10000,
            "amount": 290000
            }

    Returns:
        two1.lib.bitcoin.Transaction: a deserialized transaction derived
            from the provided json.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    for i in txn_json["inputs"]:
        if 'coinbase' in i:
            # A missing block_height (unconfirmed txn) defaults to 0.
            inputs.append(
                CoinbaseInput(height=txn_json["block_height"] or 0,
                              raw_script=bytes.fromhex(i['coinbase']),
                              sequence=i['sequence'],
                              block_version=1))
        else:
            # Script length etc. are not returned so we need to
            # prepend that.
            script, _ = Script.from_bytes(
                pack_var_str(bytes.fromhex(i["script_signature_hex"])))
            inputs.append(TransactionInput(Hash(i["output_hash"]),
                                           i["output_index"],
                                           script,
                                           i["sequence"]))
        # Track the first address of each input, when present.
        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    for i in txn_json["outputs"]:
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script_hex"])))
        outputs.append(TransactionOutput(i["value"], script))
        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["lock_time"])

    return txn, addr_keys
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction.

    NOTE(review): the code below parses the Insight-style format
    ("vin"/"vout"/"locktime" keys with per-entry "n" ordering and
    BTC-denominated output values) — the chain.com-style format
    previously documented here does not match the keys actually read.

    Args:
        txn_json: JSON object with "vin" and "vout" arrays (each entry
            carrying an "n" ordinal), "locktime", and "txid". Coinbase
            inputs carry a "coinbase" hex field; regular inputs carry
            "txid", "vout", "scriptSig" and "sequence". Outputs carry
            "value" (in BTC) and "scriptPubKey".

    Returns:
        tuple: (Transaction, set) — the deserialized transaction and
            the set of addresses seen in its inputs and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    # Inputs/outputs must be assembled in their canonical order "n".
    for i in sorted(txn_json["vin"], key=lambda i: i["n"]):
        if 'coinbase' in i:
            inputs.append(CoinbaseInput(height=0,
                                        raw_script=bytes.fromhex(i['coinbase']),
                                        sequence=i['sequence'],
                                        block_version=1))
        else:
            script = Script.from_hex(i["scriptSig"]["hex"])
            inputs.append(TransactionInput(Hash(i["txid"]),
                                           i["vout"],
                                           script,
                                           i["sequence"]))
        if "addr" in i:
            addr_keys.add(i["addr"])

    for o in sorted(txn_json["vout"], key=lambda o: o["n"]):
        script = Script.from_hex(o["scriptPubKey"]["hex"])
        # Output values are given in BTC; convert to satoshis via
        # Decimal to avoid float rounding errors.
        value = int(decimal.Decimal(str(o["value"])) *
                    decimal.Decimal('1e8'))
        outputs.append(TransactionOutput(value, script))
        if "addresses" in o["scriptPubKey"]:
            for a in o["scriptPubKey"]["addresses"]:
                addr_keys.add(a)

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["locktime"])
    # Sanity check: the rebuilt txn must hash to the reported txid.
    assert txn.hash == Hash(txn_json['txid'])

    return txn, addr_keys
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in
        address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
        Transaction objects.
    """
    ret = defaultdict(list)
    min_block = min_block or 0
    # Work on a copy; addresses that get rate-limited are re-queued.
    address_list_local = address_list[:]
    while address_list_local:
        addresses, address_list_local = self._pop_chunks(
            address_list_local, self.rate_limit_per_sec)
        r = self._request("GET",
                          "/addrs/{}/full".format(";".join(addresses)),
                          False,
                          params={
                              "limit": 999999999,
                              "after": min_block
                          })
        return_data = r.json()
        # A single-address request returns a bare object, not a list.
        if isinstance(return_data, dict):
            return_data = [return_data, ]

        received_addresses = set()
        for txn_data in return_data:
            # Addresses the API errored on are retried on a later pass.
            if "error" in txn_data:
                continue
            received_addresses.add(txn_data["address"])
            for txn in filter(lambda x: x["block_height"] >= min_block,
                              txn_data["txs"]):
                block_hash = None
                if "block_hash" in txn and txn['block_hash']:
                    block_hash = Hash(txn['block_hash'])
                metadata = dict(
                    block=txn['block_height'],
                    block_hash=block_hash,
                    network_time=timegm(
                        arrow.get(txn['received']).datetime.timetuple()),
                    confirmations=txn['confirmations'])
                txn_obj, addr_keys = self.txn_from_json(txn)
                for addr in addr_keys:
                    if addr in addresses:
                        ret[addr].append(
                            dict(metadata=metadata, transaction=txn_obj))

        # NOTE(review): set difference requires `addresses` to be a set
        # (i.e. _pop_chunks must return a set chunk) — confirm, as a
        # list here would raise TypeError.
        remainder = addresses - received_addresses
        address_list_local.extend(remainder)
        if remainder:
            # If the rate limit kicked, sleep for a little bit
            time.sleep(0.8)

    return ret