def from_bytes(b):
    """ Deserializes a BlockHeader from a byte stream.

        Consumes the header fields from the front of the stream and
        returns whatever bytes are left over.

    Args:
        b (bytes): bytes beginning with the (4-byte) version.

    Returns:
        tuple: (BlockHeader, bytes) - the deserialized BlockHeader and
            the remainder of the byte stream.
    """
    version, b = unpack_u32(b)
    prev_block_hash, b = Hash(b[:32]), b[32:]
    merkle_root_hash, b = Hash(b[:32]), b[32:]
    time, b = unpack_u32(b)
    bits, b = unpack_u32(b)
    nonce, b = unpack_u32(b)

    bh = BlockHeader(version, prev_block_hash, merkle_root_hash,
                     time, bits, nonce)
    return bh, b
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
            Transaction objects.
    """
    last_block_index = self.get_block_height()
    ret = defaultdict(list)
    # Seed the paging bound with the caller's limit; the first response's
    # "totalItems" replaces it (so limit only bounds the first page).
    total_items = limit
    # The API caps multi-address queries, so request in chunks of 199.
    for addresses in self._list_chunks(address_list, 199):
        fr = 0
        to = min(100, limit)
        # Page through results using the from/to cursors echoed back
        # by the server.
        while fr < total_items:
            req = "addrs/" + ",".join(addresses) + \
                "/txs?from=%d&to=%d" % (fr, to)
            r = self._request("GET", req)
            txn_data = r.json()

            if "totalItems" in txn_data:
                total_items = txn_data["totalItems"]

            # Advance the cursor to where the server says this page ended.
            fr = txn_data["to"]
            to = fr + 100

            for data in txn_data['items']:
                # Skip entries that don't look like full transactions.
                if "vin" not in data or "vout" not in data:
                    continue

                block_hash = None
                block = None
                if data['confirmations'] > 0:
                    # Derive the block height from the current tip and
                    # the confirmation count.
                    block = last_block_index - data['confirmations'] + 1
                    block_hash = Hash(data['blockhash'])

                metadata = dict(block=block,
                                block_hash=block_hash,
                                network_time=data.get("time", None),
                                confirmations=data['confirmations'])

                # Honor min_block only for confirmed transactions.
                if min_block and block:
                    if block < min_block:
                        continue

                txn, addr_keys = self.txn_from_json(data)
                for addr in addr_keys:
                    if addr in addresses:
                        ret[addr].append(dict(metadata=metadata,
                                              transaction=txn))

    return ret
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    ret = {}
    for txid in ids:
        query_params = {"includeHex": "true",
                        "limit": 999999999}
        r = self._request("GET", "/txs/%s" % txid, True,
                          params=query_params)
        data = r.json()

        # Unconfirmed transactions carry no block hash.
        block_hash = Hash(data['block_hash']) if "block_hash" in data \
            else None
        received = arrow.get(data['received']).datetime.timetuple()
        metadata = dict(block=data['block_height'],
                        block_hash=block_hash,
                        network_time=timegm(received),
                        confirmations=data['confirmations'])

        txn, _ = self.txn_from_json(data)
        assert str(txn.hash) == txid

        ret[txid] = dict(metadata=metadata, transaction=txn)

    return ret
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    ret = {}
    for txid in ids:
        r = self._request("GET", "transactions/%s" % txid)
        data = r.json()

        # Silently skip TXIDs the server couldn't resolve.
        if r.status_code != 200:
            continue

        block_hash = Hash(data['block_hash']) if data['block_hash'] \
            else None
        received = arrow.get(data['chain_received_at']).datetime
        metadata = dict(block=data['block_height'],
                        block_hash=block_hash,
                        network_time=timegm(received.timetuple()),
                        confirmations=data['confirmations'])

        txn, _ = self.txn_from_json(data)
        assert str(txn.hash) == txid

        ret[txid] = dict(metadata=metadata, transaction=txn)

    return ret
def run(self):
    """ Mines shares for the current work notification.

        Iterates the enonce2 space; for each candidate it rebuilds the
        coinbase transaction and a CompactBlock, then scans the nonce
        space for a header hash below the pool target.  Found shares
        are handed back to the event loop via handle_found_cb().
    """
    self.logger.info("starting to mine")
    pool_target = utils.bits_to_target(self.notify_msg.bits_pool)
    for enonce2_num in range(0, 2 ** (self.enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(self.enonce2_size,
                                       byteorder="big")

        cb_txn, _ = Transaction.from_bytes(
            self.notify_msg.coinb1 + self.enonce1 + enonce2 +
            self.notify_msg.coinb2)
        cb = CompactBlock(self.notify_msg.height,
                          self.notify_msg.version,
                          Hash(self.notify_msg.prev_block_hash),
                          self.notify_msg.ntime,
                          self.notify_msg.nbits,  # lower difficulty work for testing
                          self.notify_msg.merkle_edge,
                          cb_txn)

        for nonce in range(0xffffffff):
            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                self.logger.info("Found Share")
                share = Share(enonce2=enonce2,
                              nonce=nonce,
                              work_id=self.notify_msg.work_id,
                              otime=self.notify_msg.ntime)
                # asyncio.async was a deprecated alias removed in
                # Python 3.10; ensure_future is the supported
                # equivalent.  call_soon_threadsafe is required because
                # this runs in a worker thread, not the loop's thread.
                self.event_loop.call_soon_threadsafe(
                    asyncio.ensure_future,
                    self.handle_found_cb(share))
                # Throttle share submission a bit.
                time.sleep(0.3)
def set_txn_side_effect_for_hd_discovery(self):
    """ Primes the mocked get_transactions() with one MockTxnDict per
        expected discovery call, and returns how many were queued.
    """
    # For each used account, there are at least 2 calls required:
    # 1 for the first DISCOVERY_INCREMENT payout addresses and 1
    # for the first DISCOVERY_INCREMENT change
    # addresses. Depending on the number of used addresses for the
    # account, this will change.
    effects = []

    # Discovery always probes at least one account, even if none are used.
    n = self._num_used_accounts
    if n == 0:
        n = 1

    for acct_num in range(n):
        # change == 0 -> payout chain, change == 1 -> change chain.
        for change in [0, 1]:
            num_used = self._num_used_addresses[acct_num][change]
            # Number of DISCOVERY_INCREMENT-sized windows needed to
            # cover the used addresses plus the gap limit.
            r = math.ceil((num_used + HDAccount.GAP_LIMIT) /
                          self.address_increment)
            k = 'change_addresses' if change else 'payout_addresses'
            addr_list = self._acct_keys[acct_num][k]
            if change:
                # Confirmed-looking metadata; block/confirmations are
                # offset by r only so each chain's data differs.
                metadata = dict(block=234790 + r,
                                block_hash=Hash("000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"),
                                confirmations=23890 - r)
            else:
                # Unconfirmed-looking metadata for the payout chain.
                metadata = dict(block=None,
                                block_hash=None,
                                confirmations=0)

            # Even with no used addresses there is one discovery probe.
            if r == 0:
                r = 1

            for i in range(r):
                start = i * self.address_increment
                end = (i + 1) * self.address_increment
                addr_range = range(start, end)
                out = TransactionOutput(value=10000,
                                        script=Script.build_p2pkh(
                                            address_to_key_hash(
                                                addr_list[i])[1]))
                dummy_txn = Transaction(1, [], [out], 0)

                m = MockTxnDict(num_used=num_used,
                                addr_range=addr_range,
                                addr_list=addr_list,
                                used_value=[dict(metadata=metadata,
                                                 transaction=dummy_txn)],
                                unused_value=[])
                effects.append(m)

    self.get_transactions.side_effect = effects

    return len(effects)
class CoinbaseInput(TransactionInput):
    """ See https://bitcoin.org/en/developer-reference#coinbase

    Args:
        height (uint): The height of the block coinbase is part of
            will go into. Not required for version 1
            blocks.
        raw_script (bytes): the bytes of the coinbase script. For
            block_version > 1 the height portion should NOT be included
            in this script.
        sequence (int): Unless you are Satoshi with a version 1 block,
            the default is fine. If you are Satoshi, send me some of
            your private keys and set this to 0.
        block_version (int): The version of the block this coinbase is
            a part of or will go into. If raw_script already contains
            the height of the block, this must be 1.
    """
    # Coinbase inputs spend the all-zeroes outpoint at index 0xffffffff.
    NULL_OUTPOINT = Hash(bytes(32))
    MAX_INT = 0xffffffff

    def __init__(self, height, raw_script, sequence=MAX_INT,
                 block_version=3):
        self.height = height
        # For block_version > 1, BIP-34 requires the height pushed at
        # the front of the script; version 1 scripts are used verbatim.
        scr = raw_script if block_version == 1 \
            else Script.build_push_int(self.height) + raw_script

        # Coinbase scripts are basically whatever, so we don't
        # try to create a script object from them.
        super().__init__(self.NULL_OUTPOINT,
                         self.MAX_INT,
                         scr,
                         sequence)

    def __str__(self):
        """ Returns a human readable formatting of this input.

        Returns:
            s (str): A string containing the human readable input.
        """
        return ("CoinbaseInput(" +
                "Outpoint: %s " % (self.outpoint) +
                "Outpoint Index: 0x%08x " % (self.outpoint_index) +
                "Script: %s " % (bytes_to_str(self.script)) +
                "Sequence: 0x%08x)" % (self.sequence_num))

    def __bytes__(self):
        """ Serializes the object into a byte stream.

        Returns:
            b (bytes): byte stream containing the serialized
                coinbase input.
        """
        parts = (bytes(self.outpoint),
                 pack_u32(self.outpoint_index),
                 pack_var_str(self.script),
                 pack_u32(self.sequence_num))
        return b"".join(parts)
def __init__(self, height, version, prev_block_hash, time, bits, nonce,
             txns):
    """ Builds a block from its transactions.

        The header starts with a zeroed merkle root; invalidate()
        computes the real merkle tree from the transactions.
    """
    placeholder_root = Hash(bytes(32))  # Fake merkle_root for now
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    placeholder_root,
                                    time,
                                    bits,
                                    nonce)
    self.height = height
    self.txns = txns

    self.merkle_tree = None
    self.invalidate()
def mine_work(work_msg, enonce1, enonce2_size):
    """ Mines the work using a CPU to find a valid solution

        Loops until the CPU finds a valid solution of the given work.

    Todo:
        slow down the click echo when on a 21BC

    Args:
        work_msg (WorkNotification): the work given by the pool API
        enonce1 (bytes): extra nonce required to make the coinbase
            transaction
        enonce2_size (int): size of the extra nonce 2 in bytes

    Returns:
        Share: the solved share, or None if the whole search space was
            exhausted without finding a solution.
    """
    pool_target = utils.bits_to_target(work_msg.bits_pool)
    for enonce2_num in range(0, 2 ** (enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(enonce2_size, byteorder="big")

        cb_txn, _ = Transaction.from_bytes(
            work_msg.coinb1 + enonce1 + enonce2 + work_msg.coinb2)
        cb = CompactBlock(work_msg.height,
                          work_msg.version,
                          Hash(work_msg.prev_block_hash),
                          work_msg.ntime,
                          work_msg.nbits,  # lower difficulty work for testing
                          work_msg.merkle_edge,
                          cb_txn)

        row_counter = 0
        for nonce in range(0xffffffff):
            # Progress tick every 6000 nonces.  Int literal instead of
            # the original 6e3: avoids a float modulo on every
            # iteration of the hot loop.
            if nonce % 6000 == 0:
                click.echo(click.style(u'█', fg='green'), nl=False)
                row_counter += 1
                # Wrap the progress bar after 40 ticks.
                if row_counter > 40:
                    row_counter = 0
                    click.echo("")

            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                share = Share(
                    enonce2=enonce2,
                    nonce=nonce,
                    work_id=work_msg.work_id,
                    otime=int(time.time()))
                # adds a new line at the end of progress bar
                click.echo("")
                return share

        click.echo("Exhausted enonce1 space. Changing enonce2")
def __init__(self, height, version, prev_block_hash, time, bits,
             merkle_edge, cb_txn=None):
    """ Builds a compact block around a merkle edge.

        Header begins with a zeroed merkle root and nonce; the real
        root is derived once a coinbase transaction is attached.
    """
    self.block_header = BlockHeader(version,
                                    prev_block_hash,
                                    Hash(bytes(32)),  # Fake merkle_root for now
                                    time,
                                    bits,
                                    0)  # Fake nonce also
    self.height = height
    self.merkle_edge = merkle_edge

    if cb_txn is None:
        self._cb_txn = None
    else:
        # Property setter; triggers merkle-root computation.
        self.coinbase_transaction = cb_txn
def set_txn_side_effect_for_index(self, account_index, address_index,
                                  change):
    """ Primes the mocked get_transactions() with a single response
        marking one address (at address_index) as used.
    """
    dummy_txn = Transaction(1, [], [], 0)
    metadata = dict(
        block_height=234790,
        block_hash=Hash("000000000000000007d57f03ebe36dbe4f87ab2f340e93b45999ab249b6dc0df"),
        confirmations=23890)

    key = 'change_addresses' if change else 'payout_addresses'
    addr_list = self._acct_keys[account_index][key]
    single_addr_range = range(address_index, address_index + 1)

    mtd = MockTxnDict(num_used=address_index + 1,
                      addr_range=single_addr_range,
                      addr_list=addr_list,
                      used_value=[dict(metadata=metadata,
                                       transaction=dummy_txn)],
                      unused_value=[])
    self.get_transactions.side_effect = [mtd]
def from_bytes(b):
    """ Deserializes a byte stream into a TransactionInput.

    Args:
        b (bytes): byte stream starting with the outpoint.

    Returns:
        tuple: First element of the tuple is the TransactionInput
               object and the second is the remaining byte stream.
    """
    outpoint_hash = Hash(b[:32])
    outpoint_index, remainder = unpack_u32(b[32:])
    script, remainder = Script.from_bytes(remainder)
    sequence_num, remainder = unpack_u32(remainder)

    txn_input = TransactionInput(outpoint_hash,
                                 outpoint_index,
                                 script,
                                 sequence_num)
    return txn_input, remainder
def get_utxos(self, addresses, include_unconfirmed=False):
    """ Returns a dict containing the UTXOs for the desired addresses

    Args:
        addresses (list): List of addresses to get balances for
        include_unconfirmed (bool): True if unconfirmed transactions
            should be included in the balance.

    Returns:
        dict: Keys are addresses, values are lists of
            UnspentTransactionOutput objects for the address.
    """
    # Any of these status bits marks an output that is not yet fully
    # confirmed (note the mask also includes UNSPENT itself).
    unconfirmed_mask = self.UNSPENT | self.UNCONFIRMED | self.PROVISIONAL
    rv = {}
    for addr in addresses:
        # Get the list of unspent deposits.
        if addr not in self._deposits_for_addr:
            continue
        for txid in self._deposits_for_addr[addr]:
            for i in self._deposits_for_addr[addr][txid]:
                # Look up the status in the outputs cache
                if txid not in self._outputs_cache or \
                   i not in self._outputs_cache[txid]:
                    raise Exception("Don't have information for %r:%r" %
                                    (txid, i))
                status = self._outputs_cache[txid][i]['status']
                if status & self.UNSPENT:
                    # Include the output if unconfirmed outputs are
                    # requested, or if it is exactly UNSPENT (no
                    # unconfirmed/provisional bits) otherwise.
                    if (status & unconfirmed_mask and include_unconfirmed) or \
                       (status == self.UNSPENT and not include_unconfirmed):
                        out = self._outputs_cache[txid][i]['output']
                        utxo = UnspentTransactionOutput(
                            transaction_hash=Hash(txid),
                            outpoint_index=i,
                            value=out.value,
                            scr=out.script,
                            confirmations=self._txn_cache[txid].
                            confirmations)
                        if addr not in rv:
                            rv[addr] = []
                        rv[addr].append(utxo)

    return rv
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in
        address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
            Transaction objects.
    """
    ret = defaultdict(list)
    # Query addresses in chunks of 199 to stay within API limits.
    for addresses in self._list_chunks(address_list, 199):
        path = "addresses/" + ",".join(addresses) \
               + "/transactions?limit={}".format(limit)
        if min_block:
            path += "&min_block={}".format(min_block)

        r = self._request("GET", path)
        txn_data = r.json()

        for data in txn_data:
            block_hash = Hash(data['block_hash']) if data['block_hash'] \
                else None
            received = arrow.get(data['chain_received_at']).datetime
            metadata = dict(block=data['block_height'],
                            block_hash=block_hash,
                            network_time=timegm(received.timetuple()),
                            confirmations=data['confirmations'])

            txn, addr_keys = self.txn_from_json(data)
            for addr in addr_keys:
                if addr in addresses:
                    ret[addr].append(dict(metadata=metadata,
                                          transaction=txn))

    return ret
def _deserialize(wt_dict):
    # Private, only for internal wallet usage
    wt = WalletTransaction.from_hex(wt_dict['transaction'])

    # Older serializations put the fields at the top level; newer ones
    # nest them under 'metadata'.
    m = wt_dict['metadata'] if 'metadata' in wt_dict else wt_dict

    wt.block = m['block']
    if m['block_hash'] is not None:
        wt.block_hash = Hash(m['block_hash'])
    wt.confirmations = m['confirmations']
    wt.network_time = m['network_time']

    # Optional fields: only set when present.
    for optional in ('value', 'fees', 'provisional'):
        if optional in m:
            setattr(wt, optional, m[optional])

    return wt
def mine_work(work_msg, enonce1, enonce2_size):
    """ CPU-mines the given work until a share below the pool target
        is found.

    Args:
        work_msg: work notification from the pool (provides coinb1/2,
            bits_pool, height, version, prev_block_hash, ntime, nbits,
            merkle_edge, work_id).
        enonce1 (bytes): extra nonce 1 used in the coinbase
            transaction.
        enonce2_size (int): size of the extra nonce 2 in bytes.

    Returns:
        Share: the solved share, or None if the search space was
            exhausted.
    """
    pool_target = utils.bits_to_target(work_msg.bits_pool)
    for enonce2_num in range(0, 2**(enonce2_size * 8)):
        enonce2 = enonce2_num.to_bytes(enonce2_size, byteorder="big")

        # Rebuild the coinbase transaction with this enonce2 candidate.
        cb_txn, _ = Transaction.from_bytes(work_msg.coinb1 +
                                           enonce1 +
                                           enonce2 +
                                           work_msg.coinb2)
        cb = CompactBlock(work_msg.height,
                          work_msg.version,
                          Hash(work_msg.prev_block_hash),
                          work_msg.ntime,
                          work_msg.nbits,  # lower difficulty work for testing
                          work_msg.merkle_edge,
                          cb_txn)

        row_counter = 0
        for nonce in range(0xffffffff):
            # Progress tick roughly every 6000 nonces (6e3 is a float,
            # so this is a float modulo).
            if nonce % 6e3 == 0:
                click.echo(click.style(u'█', fg='green'), nl=False)
                row_counter += 1
                # Wrap the progress bar after 40 ticks.
                if row_counter > 40:
                    row_counter = 0
                    click.echo("")

            cb.block_header.nonce = nonce
            h = cb.block_header.hash.to_int('little')
            if h < pool_target:
                share = Share(enonce2=enonce2,
                              nonce=nonce,
                              work_id=work_msg.work_id,
                              otime=int(time.time()))
                # adds a new line at the end of progress bar
                click.echo("")
                return share

        click.echo("Exhausted enonce1 space. Changing enonce2")
def get_transactions_by_id(self, ids):
    """ Gets transactions by their IDs.

    Args:
        ids (list(str)): List of TXIDs to retrieve.

    Returns:
        dict: A dict keyed by TXID of Transaction objects.
    """
    last_block_index = self.get_block_height()
    ret = {}
    for txid in ids:
        r = self._request("GET", "tx/%s" % txid)
        data = r.json()

        if r.status_code == 200:
            # Skip responses that don't look like full transactions.
            if "vin" not in data or "vout" not in data:
                continue

            block_hash = None
            block = None
            if data['confirmations'] > 0:
                # Derive block height from the current tip and the
                # confirmation count.
                block = last_block_index - data['confirmations'] + 1
                block_hash = Hash(data['blockhash'])

            # Use .get(): unconfirmed transactions may not carry a
            # "time" field (matches get_transactions in this provider).
            metadata = dict(block=block,
                            block_hash=block_hash,
                            network_time=data.get("time", None),
                            confirmations=data['confirmations'])

            txn, _ = self.txn_from_json(data)
            assert str(txn.hash) == txid

            ret[txid] = dict(metadata=metadata, transaction=txn)

    return ret
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction.

    Args:
        txn_json (dict): Insight-style transaction JSON.  The code
            reads at least: "vin" (each entry with "n", "sequence" and
            either a "coinbase" hex string or
            "txid"/"vout"/"scriptSig"/"addr"), "vout" (each entry with
            "n", "value" in BTC and "scriptPubKey" containing "hex"
            and optionally "addresses"), "locktime" and "txid".

    Returns:
        tuple: (Transaction, set) - the deserialized transaction and
            the set of addresses involved in its inputs and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    # Inputs/outputs must be assembled in their original order ("n").
    for i in sorted(txn_json["vin"], key=lambda i: i["n"]):
        if 'coinbase' in i:
            inputs.append(CoinbaseInput(height=0,
                                        raw_script=bytes.fromhex(i['coinbase']),
                                        sequence=i['sequence'],
                                        block_version=1))
        else:
            script = Script.from_hex(i["scriptSig"]["hex"])
            inputs.append(TransactionInput(Hash(i["txid"]),
                                           i["vout"],
                                           script,
                                           i["sequence"]))
        # Coinbase inputs carry no "addr" key.
        if "addr" in i:
            addr_keys.add(i["addr"])

    for o in sorted(txn_json["vout"], key=lambda o: o["n"]):
        script = Script.from_hex(o["scriptPubKey"]["hex"])
        # "value" is in BTC; convert to satoshis via Decimal to avoid
        # float rounding.
        value = int(decimal.Decimal(str(o["value"])) *
                    decimal.Decimal('1e8'))
        outputs.append(TransactionOutput(value, script))

        if "addresses" in o["scriptPubKey"]:
            for a in o["scriptPubKey"]["addresses"]:
                addr_keys.add(a)

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["locktime"])
    # Sanity check: the rebuilt transaction must hash to the given TXID.
    assert txn.hash == Hash(txn_json['txid'])

    return txn, addr_keys
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction.

    Args:
        txn_json (dict): Chain-style transaction JSON.  The code reads
            at least: "inputs" (each entry with "sequence" and either a
            "coinbase" hex string or
            "output_hash"/"output_index"/"script_signature_hex", plus
            optional "addresses"), "outputs" (each entry with "value"
            in satoshis, "script_hex" and optional "addresses"),
            "block_height" and "lock_time".

    Returns:
        tuple: (Transaction, set) - the deserialized transaction and
            the set of addresses involved in its inputs and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    for i in txn_json["inputs"]:
        if 'coinbase' in i:
            # block_height may be null for unconfirmed transactions.
            inputs.append(
                CoinbaseInput(height=txn_json["block_height"] or 0,
                              raw_script=bytes.fromhex(i['coinbase']),
                              sequence=i['sequence'],
                              block_version=1))
        else:
            # Script length etc. are not returned so we need to
            # prepend that.
            script, _ = Script.from_bytes(
                pack_var_str(bytes.fromhex(i["script_signature_hex"])))
            inputs.append(
                TransactionInput(Hash(i["output_hash"]),
                                 i["output_index"],
                                 script,
                                 i["sequence"]))
        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    for i in txn_json["outputs"]:
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script_hex"])))
        outputs.append(TransactionOutput(i["value"], script))

        if "addresses" in i:
            addr_keys.add(i["addresses"][0])

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["lock_time"])

    return txn, addr_keys
def txn_from_json(txn_json):
    """ Returns a new Transaction from a JSON-serialized transaction.

    Args:
        txn_json (dict): BlockCypher-style transaction JSON.  The code
            reads at least: "inputs" (each entry with
            "prev_hash"/"output_index"/"script"/"sequence" and optional
            "addresses"), "outputs" (each entry with "value" in
            satoshis, "script" and optional "addresses") and
            "lock_time".

    Returns:
        tuple: (Transaction, set) - the deserialized transaction and
            the set of addresses involved in its inputs and outputs.
    """
    inputs = []
    outputs = []
    addr_keys = set()

    for i in txn_json["inputs"]:
        # Chain doesn't return the stuff about script length etc, so
        # we need to prepend that.
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script"])))
        inputs.append(
            TransactionInput(Hash(i["prev_hash"]),
                             i["output_index"],
                             script,
                             i["sequence"]))
        if "addresses" in i and i["addresses"]:
            addr_keys.add(i["addresses"][0])

    for i in txn_json["outputs"]:
        script, _ = Script.from_bytes(
            pack_var_str(bytes.fromhex(i["script"])))
        outputs.append(TransactionOutput(i["value"], script))

        if "addresses" in i and i["addresses"]:
            addr_keys.add(i["addresses"][0])

    txn = Transaction(Transaction.DEFAULT_TRANSACTION_VERSION,
                      inputs,
                      outputs,
                      txn_json["lock_time"])

    return txn, addr_keys
def get_transactions(self, address_list, limit=100, min_block=None):
    """ Provides transactions associated with each address in
        address_list.

    Args:
        address_list (list(str)): List of Base58Check encoded Bitcoin
            addresses.
        limit (int): Maximum number of transactions to return.
        min_block (int): Block height from which to start getting
            transactions. If None, will get transactions from the
            entire blockchain.

    Returns:
        dict: A dict keyed by address with each value being a list of
            Transaction objects.
    """
    ret = defaultdict(list)
    min_block = min_block or 0

    # Work on a copy so the caller's list is not mutated; addresses
    # missing from a (possibly rate-limited) response get requeued.
    address_list_local = address_list[:]
    while address_list_local:
        addresses, address_list_local = self._pop_chunks(
            address_list_local, self.rate_limit_per_sec)

        r = self._request("GET",
                          "/addrs/{}/full".format(";".join(addresses)),
                          False,
                          params={"limit": 999999999,
                                  "after": min_block})
        return_data = r.json()
        # Single-address queries return a bare dict rather than a list.
        if isinstance(return_data, dict):
            return_data = [return_data, ]

        received_addresses = set()
        for txn_data in return_data:
            if "error" in txn_data:
                continue
            received_addresses.add(txn_data["address"])
            for txn in filter(lambda x: x["block_height"] >= min_block,
                              txn_data["txs"]):
                block_hash = None
                if "block_hash" in txn and txn['block_hash']:
                    block_hash = Hash(txn['block_hash'])
                metadata = dict(
                    block=txn['block_height'],
                    block_hash=block_hash,
                    network_time=timegm(
                        arrow.get(txn['received']).datetime.timetuple()),
                    confirmations=txn['confirmations'])

                txn_obj, addr_keys = self.txn_from_json(txn)
                for addr in addr_keys:
                    if addr in addresses:
                        ret[addr].append(
                            dict(metadata=metadata,
                                 transaction=txn_obj))

        # Requeue any addresses the response omitted.  Coerce to set
        # first: _pop_chunks may hand back a list, and list - set
        # raises TypeError.
        remainder = set(addresses) - received_addresses
        address_list_local.extend(remainder)
        if remainder:
            # If the rate limit kicked, sleep for a little bit
            time.sleep(0.8)

    return ret