def make_blocks(count, nonce_base=30000, previous_block_hash=b'\0' * 32):
    """Build a chain of `count` linked test blocks.

    For each block the nonce search starts at index * nonce_base and is
    incremented until the last byte of the block hash equals the block
    index (mod 256), giving deterministic but non-trivial hashes.
    """
    chain = []
    parent_hash = previous_block_hash
    for index in range(count):
        nonce = index * nonce_base
        transactions = [COINBASE_TX]  # + [make_tx(i) for i in range(nonce, nonce + 8)]
        target_byte = index & 0xff
        while True:
            candidate = Block(
                version=1,
                previous_block_hash=parent_hash,
                merkle_root=b'\0' * 32,
                timestamp=1390000000 + index * 600,
                difficulty=index,
                nonce=nonce,
                txs=transactions,
            )
            if candidate.hash()[-1] == target_byte:
                break
            nonce += 1
        chain.append(candidate)
        parent_hash = candidate.hash()
    return chain
def block_at_height(block_height):
    """Fetch, parse, and process the block at `block_height`, retrying forever.

    Returns a (block, block_hash, block_height) tuple once the block has been
    fetched from bitcoind, parsed, and merged into the nulldata store.
    Transient RPC failures are logged and retried indefinitely.
    """
    _bitcoind = gen_bitcoind(timeout=5)
    while True:
        try:
            block_hash = _bitcoind.getblockhash(block_height)
            block = Block.parse(block_as_bytesio(_bitcoind, block_hash))
            ans = (block, block_hash, block_height)  # order important for merge_nulldatas
            logging.info("Processing results for height %d" % ans[2])
            merge_nulldatas_from_block_obj(*ans, bitcoind=_bitcoind)
            return ans
        except timeout as e:
            # Socket timeout: the finally block rebuilds the RPC connection.
            logging.warning('%d, Timeout... Creating new bitcoind' % block_height)
        except http.client.CannotSendRequest as e:
            # Connection left in a bad state by a previous request.
            logging.warning("Got CannotSendRequest.")
            traceback.print_tb(e.__traceback__)
        except ScriptError as e:
            logging.error("Script parse error!")
            traceback.print_tb(e.__traceback__)
        except Exception as e:
            # Catch-all so the retry loop never dies on an unexpected error.
            logging.warning("%d, %s, %s" % (block_height, e, type(e)))
        finally:
            # NOTE(review): this also runs on the successful `return ans` path,
            # creating one extra bitcoind connection per call — confirm whether
            # that is intentional.
            _bitcoind = gen_bitcoind()
            logging.info("%d; Regen'd bitcoind" % block_height)
            sleep(0.1)
def test_getblock_p2p_non_verbose(self):
    """getblock(mode=0): a block missing locally is fetched via P2P and
    returned as a hex string identical to the fixture."""
    # Serialized block fixture (hex).
    hex_block ='010000006fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe6' \
               '80e1fee14677ba1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e3629901010000000100000000000000' \
               '00000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0' \
               '100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0' \
               'a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000'
    bytes_block = binascii.unhexlify(hex_block.encode())
    # Local repository has the header but no stored block body.
    self.repository.headers.get_best_header.return_value = {
        'block_height': 513980
    }
    self.repository.headers.get_block_header.return_value = self.header
    self.repository.blockchain.get_block.return_value = None
    self.repository.blockchain.async_save_block.return_value = async_coro(True)
    # First P2P attempt fails (None); the second returns the block.
    self.p2p.get_block.side_effect = [
        async_coro(None),
        async_coro({
            'block_hash': '00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048',
            'block_bytes': bytes_block,
            'block_object': Block.parse(io.BytesIO(bytes_block))
        })
    ]
    # mode=0 requests the raw (non-verbose) block.
    block = self.loop.run_until_complete(
        self.sut.getblock(
            '000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e',
            0))
    self.assertEqual(block, hex_block)
def test_mempool_observer(self):
    """A transaction enters the mempool on announcement and is evicted once
    a block containing it is processed."""
    connection = Mock()
    from pycoin.tx.Tx import Tx
    # Raw segwit transaction fixture.
    tx = Tx.from_hex(
        '01000000000101112a649fd72656cf572259cb7cb61bd31ccdbdf0944070e73401565affbe629d0100000000ffffffff02608'
        'de2110000000017a914d52b516c1a094462959ed6facebb94429d2cebf487d3135b0b00000000220020701a8d401c84fb13e6'
        'baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d0400473044022006b149e0cf031f57fd443bd1210b381e9b1b15094'
        '57ba1f49e48b803696f56e802203d66bd974ad3ac5b7591cc84e706b78d139c61e2bf1995a89c4dc0758984a2b70148304502'
        '2100fe7275d601080e1870517774a3ad6accaa7f8ad144addec3251e98685d4fefad02207792c2b0ed6ab42ed2ba6d12e6bd3'
        '4db8c6f4ac6f15e604f70ea85a735c450b1016952210375e00eb72e29da82b89367947f29ef34afb75e8654f6ea368e0acdfd'
        '92976b7c2103a1b26313f430c4b15bb1fdce663207659d8cac749a0e53d70eff01874496feff2103c96d495bfdd5ba4145e3e'
        '046fee45e84a8a48ad05bd8dbb395c011a32cf9f88053ae00000000')
    # Announce the transaction; it must land in the mempool repository.
    self.loop.run_until_complete(
        self.sut.on_transaction(connection, {'tx': tx}))
    self.assertEqual(tx.w_id(),
                     [x for x in self.mempool_repository.get_txids()][0])
    self.repository.blockchain.get_transactions_by_block_hash.return_value = [], None
    # Build a block that includes the transaction.
    block = Block(1,
                  b'0' * 32,
                  merkle_root=merkle([tx.hash()]),
                  timestamp=123456789,
                  difficulty=3000000,
                  nonce=1 * 137)
    block.txs.append(tx)
    as_bin = block.as_bin()
    # Round-trip sanity check on serialization.
    Block.from_bin(as_bin)
    block_header = {'block_hash': block.id()}
    self.batcher_factory.add_peer.return_value = async_coro(True)
    self.batcher_factory.inv_item_to_future.return_value = block.as_bin()
    # Before the block is processed the tx is visible in the raw mempool.
    mempool_response = self.mempool_repository.get_raw_mempool(True)
    self.assertEqual(
        {
            '41867301a6cff5c47951aa1a4eef0be910db0cb5f154eaeb469732e1f9b54548': {
                'size': 381,
                'fee': 0,
                'modifiedfee': 0,
                'time': ANY,
                'height': 0,
                'descendantcount': 0,
                'descendantsize': 0,
                'descendantfees': 0,
                'ancestorcount': 0,
                'ancestorsize': 0,
                'ancestorfees': 0,
                'depends': []
            }
        }, mempool_response)
    # save_block echoes back a parsed block object.
    self.repository.blockchain.save_block.side_effect = lambda a: {
        'block_object': Block.from_bin(a['block_bytes'])
    }
    self.repository.blockchain.get_transactions_by_block_hash.return_value = [], None
    # Processing the block header must empty the mempool.
    self.loop.run_until_complete(self.sut.on_block_header(block_header))
    self.assertEqual(self.mempool_repository.get_raw_mempool(True), {})
    self.assertEqual([x for x in self.mempool_repository.get_txids()], [])
def get_blockheader_with_transaction_hashes(self, block_hash):
    """Fetch a block header and its transaction hashes from the web API.

    Returns (blockheader, tx_hashes); returns (None, None) if the header
    does not hash to `block_hash` or the transaction list does not
    reproduce the advertised merkle root.
    """
    url = "%s/block/%s" % (self.base_url, b2h_rev(block_hash))
    response = json.loads(urlopen(url).read().decode("utf8"))
    merkle_root = h2b_rev(response.get("merkleroot"))
    tx_hashes = [h2b_rev(h) for h in response.get("tx")]
    blockheader = Block(
        response.get("version"),
        h2b_rev(response.get("previousblockhash")),
        merkle_root,
        response.get("time"),
        int(response.get("bits"), 16),
        int(response.get("nonce")),
    )
    # Reject the response if the header does not hash to the requested id.
    if blockheader.hash() != block_hash:
        return None, None
    # Reject if the transaction hashes do not reproduce the merkle root.
    if merkle(tx_hashes, double_sha256) != merkle_root:
        return None, None
    blockheader.height = response.get("height")
    return blockheader, tx_hashes
def get_blockheader_with_transaction_hashes(self, block_hash):
    """Fetch a block header and its transaction hashes from the web API.

    Returns (blockheader, tx_hashes) on success; (None, None) when either
    the header hash or the recomputed merkle root fails validation.
    """
    url = "%s%sblock/%s" % (self.base_url, self.base_path, b2h_rev(block_hash))
    data = json.loads(urlopen(url).read().decode("utf8"))

    merkle_root = h2b_rev(data.get("merkleroot"))
    header = Block(
        data.get("version"),
        h2b_rev(data.get("previousblockhash")),
        merkle_root,
        data.get("time"),
        int(data.get("bits"), 16),   # difficulty is the hex "bits" field
        int(data.get("nonce")),
    )
    # Validate: the header must hash to the id we asked for...
    if header.hash() != block_hash:
        return None, None
    # ...and the tx list must reproduce the merkle root it advertises.
    tx_hashes = [h2b_rev(tx_hash) for tx_hash in data.get("tx")]
    if merkle(tx_hashes, double_sha256) != merkle_root:
        return None, None
    header.height = data.get("height")
    return header, tx_hashes
def create_bitcoinish_network(symbol, network_name, subnet_name, **kwargs):
    """Assemble a Network object for a bitcoin-like coin.

    Recognized kwargs: generator, tx, block, scriptTools, magic_header_hex,
    default_port, dns_bootstrap, wif_prefix_hex, address_prefix_hex,
    pay_to_script_prefix_hex, bip32_prv_prefix_hex, bip32_pub_prefix_hex,
    sec_prefix, bech32_hrp.

    Fix: the original built an unused `network_kwargs` dict (the following
    loop set the attributes directly); the dead code is removed.
    """
    network = Network(symbol, network_name, subnet_name)

    generator = kwargs.get("generator", secp256k1_generator)
    kwargs.setdefault("sec_prefix", "%sSEC" % symbol.upper())

    # Convert any "*_hex" kwargs into their binary equivalents.
    KEYS_TO_H2B = (
        "bip32_prv_prefix bip32_pub_prefix wif_prefix address_prefix "
        "pay_to_script_prefix sec_prefix magic_header").split()
    for k in KEYS_TO_H2B:
        k_hex = "%s_hex" % k
        if k_hex in kwargs:
            kwargs[k] = h2b(kwargs[k_hex])

    network.script_tools = kwargs.get("scriptTools", BitcoinScriptTools)
    network.script_info = ScriptInfo(network.script_tools)

    # UI gets only the prefix-related kwargs.
    UI_KEYS = ("bip32_prv_prefix bip32_pub_prefix wif_prefix sec_prefix "
               "address_prefix pay_to_script_prefix bech32_hrp").split()
    ui_kwargs = {k: kwargs[k] for k in UI_KEYS if k in kwargs}
    network.ui = UI(network.script_info, generator, **ui_kwargs)
    network.extras = Extras(network.script_tools, network.ui)

    # Copy networking parameters straight onto the network object.
    NETWORK_KEYS = "network_name subnet_name dns_bootstrap default_port magic_header".split()
    for k in NETWORK_KEYS:
        if k in kwargs:
            setattr(network, k, kwargs[k])

    network.Tx = network.tx = kwargs.get("tx") or Tx
    network.Block = network.block = kwargs.get("block") or Block.make_subclass(network.tx)

    # Wire up the p2p message parser/packer for this network's tx/block types.
    streamer = standard_streamer(
        standard_parsing_functions(network.block, network.tx))
    network.parse_message, network.pack_message = make_parser_and_packer(
        streamer, standard_messages(), standard_message_post_unpacks(streamer))

    network.output_for_hwif = make_output_for_hwif(network)
    network.output_for_secret_exponent = make_output_for_secret_exponent(
        network.extras.Key)
    network.output_for_public_pair = make_output_for_public_pair(
        network.extras.Key, network)
    network.output_for_h160 = make_output_for_h160(network)
    network.BIP32Node = network.extras.BIP32Node
    network.Key = network.extras.Key
    network.ElectrumKey = network.extras.ElectrumKey
    network.Keychain = Keychain
    return network
async def on_raw_block(self, block: Block):
    """Fan a freshly received block out to the configured publishers.

    Publishes each transaction, each witness tx hash, and/or the raw block
    bytes, depending on which publishers are configured; all events are
    awaited concurrently. No-op when no publisher is set.
    """
    if not (self.transaction_publisher
            or self.transaction_hash_publisher
            or self.block_publisher):
        return
    pending = []
    if self.transaction_publisher:
        pending.extend(self.on_transaction(tx) for tx in block.txs)
    if self.transaction_hash_publisher:
        pending.extend(self.on_transaction_hash(tx.w_hash()) for tx in block.txs)
    if self.block_publisher:
        pending.append(self.block_publisher.on_event(block.as_bin()))
    if pending:
        await asyncio.gather(*pending)
def make_blocks(count, nonce_base=30000, previous_block_hash=b'\0' * 32):
    """Generate `count` chained test blocks.

    Each block's nonce starts at height * nonce_base and is ground upward
    until the final hash byte matches the height (mod 256).
    """
    result = []
    for height in range(count):
        nonce = height * nonce_base
        txs = [COINBASE_TX]  # + [make_tx(i) for i in range(nonce, nonce + 8)]
        wanted = height & 0xff
        while True:
            blk = Block(version=1,
                        previous_block_hash=previous_block_hash,
                        merkle_root=b'\0' * 32,
                        timestamp=1390000000 + height * 600,
                        difficulty=height,
                        nonce=nonce,
                        txs=txs)
            if blk.hash()[-1] == wanted:
                break
            nonce += 1
        result.append(blk)
        previous_block_hash = blk.hash()
    return result
def make_block(index):
    """Create a deterministic test block for the given chain index."""
    base = index * 30000
    return Block(
        version=1,
        previous_block_hash=b'\0' * 32,
        merkle_root=b'\0' * 32,
        timestamp=GENESIS_TIME + index,
        difficulty=base,
        nonce=base,
        txs=[make_tx(n) for n in range(base, base + 8)],
    )
def make_block(i):
    """Create a deterministic test block for index `i`."""
    offset = i * 30000
    transactions = [make_tx(k) for k in range(offset, offset + 8)]
    return Block(version=1,
                 previous_block_hash=b'\0' * 32,
                 merkle_root=b'\0' * 32,
                 timestamp=1390000000 + i,
                 difficulty=offset,
                 nonce=offset,
                 txs=transactions)
def main():
    """Parse command-line arguments and dump each given binary block file."""
    parser = argparse.ArgumentParser(
        description="Dump a block in human-readable form.")
    parser.add_argument("block_bin", nargs="+",
                        type=argparse.FileType('rb'),
                        help='The file containing the binary block.')
    args = parser.parse_args()
    for block_file in args.block_bin:
        dump_block(Block.parse(block_file), network='M')
        print('')
async def on_block_header(self, blockheader: dict, i=0):
    """Handle a newly announced block header.

    Assembles the block from locally cached transactions when possible,
    otherwise fetches it over P2P and saves it; then updates the mempool
    and schedules the new-block callbacks. On P2P failure the call is
    rescheduled (10s delay) up to 10 retries; `i` is the retry count.

    Fix: the cache-parse guard was a bare `except:` which also swallows
    SystemExit/KeyboardInterrupt — narrowed to `except Exception:`.
    """
    try:
        Logger.mempool.debug('New block request: %s', blockheader['block_hash'])
        block_transactions, size = self.repository.blockchain.get_transactions_by_block_hash(
            blockheader['block_hash'])
        block_raw_data = (tx['transaction_bytes'] for tx in block_transactions)
        # Reassemble header + tx bytes when every tx is cached locally.
        cached_block = block_transactions and (
            blockheader['header_bytes'] + b''.join(block_raw_data)) or None
        try:
            block_object = Block.from_bin(cached_block)
        except Exception:
            # Parse failure (or cached_block is None): fall back to P2P fetch.
            block_object = None
        if block_object:
            Logger.mempool.debug('Block %s in cache', blockheader['block_hash'])
            block = {
                'block_object': block_object,
            }
        else:
            Logger.mempool.debug('Block %s not in cache, fetching',
                                 blockheader['block_hash'])
            block = await self.p2p.get_block(blockheader['block_hash'], timeout=15)
            if not block:
                raise exceptions.MissingResponseException
            Logger.mempool.debug('Block %s not cached, saving',
                                 blockheader['block_hash'])
            block = self.repository.blockchain.save_block(block)
            Logger.mempool.debug('Block %s, fetch done', blockheader['block_hash'])
        # Let the mempool evict transactions confirmed by this block.
        block_txids, removed_txids = self.repository.mempool.on_new_block(
            block['block_object'])
        Logger.mempool.debug(
            'Block %s parsed by mempool repository, removed %s transactions'
            % (blockheader['block_hash'], len(removed_txids)))
        blockheader.update({"txs": block_txids})
        block.update({"verbose": blockheader})
        for callback in self.on_new_block_callbacks:
            self.loop.create_task(callback(block['block_object']))
    except (exceptions.NoPeersException, exceptions.MissingResponseException) as e:
        if i > 10:
            Logger.mempool.debug('Block fetch for %s failed (will NOT retry)',
                                 blockheader['block_hash'])
            raise
        Logger.mempool.debug('Block fetch for %s failed (will retry)',
                             blockheader['block_hash'])
        self.loop.create_task(
            self.delayer(self.on_block_header(blockheader, i=i + 1), 10))
async def getblock(self, blockhash: str, mode: int = 1):
    """Return a block by hash.

    mode 1 returns a verbose dict (header fields, txids, size,
    confirmations); any other mode returns the raw block as a hex string.
    mode 2 (verbose with decoded transactions) is not implemented.
    Returns None for an unknown hash. Local storage is preferred; P2P is
    the fallback.

    Fix: the repository-load guard was a bare `except:` which also
    swallows SystemExit/KeyboardInterrupt — narrowed to `except Exception:`.
    """
    start = time.time()
    if mode == 2:
        raise NotImplementedError
    block_header = self.repository.headers.get_block_header(blockhash)
    if not block_header:
        return
    if mode == 1:
        txids, size = self.repository.blockchain.get_txids_by_block_hash(
            block_header['block_hash'])
        if txids:
            block = self._serialize_header(block_header)
            block.update({'tx': txids, 'size': size})
            best_header = self.repository.headers.get_best_header()
            block['confirmations'] = best_header[
                'block_height'] - block_header['block_height'] + 1
            Logger.p2p.info(
                'Verbose block %s (%s) provided from local storage in %ss)',
                block_header['block_height'], blockhash,
                '{:.4f}'.format(time.time() - start))
            return block
        p2p_block = await self._get_block(block_header, verbose=True)
        Logger.p2p.info('Verbose block %s (%s) provided from P2P in %ss)',
                        block_header['block_height'], blockhash,
                        '{:.4f}'.format(time.time() - start))
        return p2p_block['verbose']
    else:
        transactions, size = self.repository.blockchain.get_transactions_by_block_hash(
            blockhash)
        if transactions:
            Logger.p2p.info(
                'Raw block %s (%s) provided from local storage in %ss)',
                block_header['block_height'], blockhash,
                '{:.4f}'.format(time.time() - start))
            try:
                # Rebuild the full block from the stored header + tx bytes.
                block = Block.parse(io.BytesIO(block_header['header_bytes']),
                                    include_transactions=False)
                block.set_txs([
                    Tx.from_bin(t['transaction_bytes']) for t in transactions
                ])
                return block.as_hex()
            except Exception:
                Logger.repository.error(
                    'Error loading block %s from repository, falling back to P2P'
                    % blockhash)
        p2p_block = await self._get_block(block_header)
        Logger.p2p.info('Raw block %s (%s) provided from P2P in %ss)',
                        block_header['block_height'], blockhash,
                        '{:.4f}'.format(time.time() - start))
        return binascii.hexlify(p2p_block['block_bytes']).decode()
def test_block(self):
    """Parse a serialized block, verify its hash and merkle root, and check
    that re-serializing an already-validated parse round-trips."""
    expected_checksum = '0000000000089F7910F6755C10EA2795EC368A29B435D80770AD78493A6FECF1'.lower(
    )
    # Serialized block fixture (header + 3 transactions).
    block_data = h2b(
        "010000007480150B299A16BBCE5CCDB1D1BBC65CFC5893B01E6619107C552000000000"
        "007900A2B203D24C69710AB6A94BEB937E1B1ADD64C2327E268D8C3E5F8B41DBED8796"
        "974CED66471B204C324703010000000100000000000000000000000000000000000000"
        "00000000000000000000000000FFFFFFFF0804ED66471B024001FFFFFFFF0100F2052A"
        "010000004341045FEE68BAB9915C4EDCA4C680420ED28BBC369ED84D48AC178E1F5F7E"
        "EAC455BBE270DABA06802145854B5E29F0A7F816E2DF906E0FE4F6D5B4C9B92940E4F0"
        "EDAC000000000100000001F7B30415D1A7BF6DB91CB2A272767C6799D721A4178AA328"
        "E0D77C199CB3B57F010000008A4730440220556F61B84F16E637836D2E74B8CB784DE4"
        "0C28FE3EF93CCB7406504EE9C7CAA5022043BD4749D4F3F7F831AC696748AD8D8E79AE"
        "B4A1C539E742AA3256910FC88E170141049A414D94345712893A828DE57B4C2054E2F5"
        "96CDCA9D0B4451BA1CA5F8847830B9BE6E196450E6ABB21C540EA31BE310271AA00A49"
        "ED0BA930743D1ED465BAD0FFFFFFFF0200E1F505000000001976A914529A63393D63E9"
        "80ACE6FA885C5A89E4F27AA08988ACC0ADA41A000000001976A9145D17976537F30886"
        "5ED533CCCFDD76558CA3C8F088AC00000000010000000165148D894D3922EF5FFDA962"
        "BE26016635C933D470C8B0AB7618E869E3F70E3C000000008B48304502207F5779EBF4"
        "834FEAEFF4D250898324EB5C0833B16D7AF4C1CB0F66F50FCF6E85022100B78A65377F"
        "D018281E77285EFC31E5B9BA7CB7E20E015CF6B7FA3E4A466DD195014104072AD79E0A"
        "A38C05FA33DD185F84C17F611E58A8658CE996D8B04395B99C7BE36529CAB7606900A0"
        "CD5A7AEBC6B233EA8E0FE60943054C63620E05E5B85F0426FFFFFFFF02404B4C000000"
        "00001976A914D4CAA8447532CA8EE4C80A1AE1D230A01E22BFDB88AC8013A0DE010000"
        "001976A9149661A79AE1F6D487AF3420C13E649D6DF3747FC288AC00000000")
    # try to parse a block
    block = Block.parse(io.BytesIO(block_data))
    assert b2h_rev(block.hash()) == expected_checksum
    block.check_merkle_hash()
    # parse already validated block (skips merkle check) and round-trip it
    block = Block.parse(io.BytesIO(block_data), check_merkle_hash=False)
    assert block.as_bin() == block_data
def receive_message (self): """This method will attempt to extract a header and message. It will return a tuple of (header, message) and set whichever can be set so far (None otherwise). """ # Calculate the size of the buffer self.buffer.seek(0, os.SEEK_END) buffer_size = self.buffer.tell() # Check if a complete header is present if buffer_size < self.header_size: return (None, None) # Go to the beginning of the buffer self.buffer.seek(0) message_model = None message_header_serial = MessageHeaderSerializer() message_header = message_header_serial.deserialize(self.buffer) total_length = self.header_size + message_header.length # Incomplete message if buffer_size < total_length: self.buffer.seek(0, os.SEEK_END) return (message_header, None) payload = self.buffer.read(message_header.length) #print (codecs.encode (payload, 'hex')) remaining = self.buffer.read() self.buffer = BytesIO() self.buffer.write(remaining) payload_checksum = MessageHeaderSerializer.calc_checksum(payload) # Check if the checksum is valid if payload_checksum != message_header.checksum: msg = "Bad checksum for command %s" % message_header.command raise InvalidMessageChecksum(msg) if message_header.command in MESSAGE_MAPPING: #print (message_header.command) if message_header.command == 'block': message_model = Block.parse(BytesIO(payload)) #print (message_model.id ()) else: deserializer = MESSAGE_MAPPING[message_header.command]() message_model = deserializer.deserialize(BytesIO(payload)) return (message_header, message_model)
def receive_message(self):
    """Attempt to extract one (header, message) pair from the buffer.

    Returns (None, None) when not even a full header has arrived,
    (header, None) when the payload is still incomplete, and
    (header, message) once a whole message is available. Raises
    InvalidMessageChecksum on a checksum mismatch.
    """
    # How many bytes have accumulated so far?
    self.buffer.seek(0, os.SEEK_END)
    available = self.buffer.tell()
    if available < self.header_size:
        return (None, None)

    # Parse the header from the start of the buffer.
    self.buffer.seek(0)
    header = MessageHeaderSerializer().deserialize(self.buffer)

    # Payload not fully received yet: restore the append position.
    if available < self.header_size + header.length:
        self.buffer.seek(0, os.SEEK_END)
        return (header, None)

    payload = self.buffer.read(header.length)
    #print (codecs.encode (payload, 'hex'))
    # Move any trailing bytes into a fresh buffer for the next message.
    leftover = self.buffer.read()
    self.buffer = BytesIO()
    self.buffer.write(leftover)

    if MessageHeaderSerializer.calc_checksum(payload) != header.checksum:
        raise InvalidMessageChecksum(
            "Bad checksum for command %s" % header.command)

    message = None
    if header.command in MESSAGE_MAPPING:
        #print (message_header.command)
        if header.command == 'block':
            # Blocks are parsed with pycoin rather than the generic mapping.
            message = Block.parse(BytesIO(payload))
            #print (message_model.id ())
        else:
            message = MESSAGE_MAPPING[header.command]().deserialize(BytesIO(payload))
    return (header, message)
def got_message(self, message):
    """Dispatch one parsed p2p message by its command name."""
    # Keep the connection alive if nothing has been sent for 30 minutes.
    if self.last_sent + 30 * 60 < time.time():
        self.send_message(bitcoin.messages.msg_ping(self.ver_send))
    self.print_debug("Received: %s" % repr(message))
    if message.command == "version":
        self.send_message(bitcoin.messages.msg_verack(self.ver_send))
        # Negotiate down to the peer's protocol version if it is lower.
        self.ver_send = min(self.ver_send, message.protover)
    elif message.command == "verack":
        self.ver_recv = self.ver_send
    elif message.command == "inv":
        self.request_objects(message.inv)
    elif message.command == "tx":
        # NOTE(review): str.encode('hex') and cStringIO are Python 2 only;
        # this handler cannot run under Python 3 as written — confirm the
        # target interpreter.
        self.new_tx_callback(Tx.tx_from_hex(message.tx.serialize().encode('hex')))
    elif message.command == "block":
        self.new_block_callback(Block.parse(cStringIO.StringIO(message.block.serialize())))
        # todo - use msg_block or block stream_serialize
    else:
        self.print_debug("received unknown message %s: %s" % (message.command, repr(message)))
async def get(self, block_bytes: bytes):
    """Parse raw block bytes into a Block.

    Small blocks (<= self.min_size, or whenever min_size is falsy) are
    parsed inline; larger blocks are parsed in a worker thread while this
    coroutine polls the result queue without blocking the event loop.

    Fix: the polling loop used a bare `except: pass`, which also swallows
    SystemExit/KeyboardInterrupt and hides real errors from q.get; it now
    catches only queue.Empty.
    """
    if not self.min_size or len(block_bytes) <= self.min_size:
        return Block.from_bin(block_bytes)
    q = queue.Queue()
    thread = threading.Thread(target=self.getblock, args=(block_bytes, q))
    thread.start()
    try:
        while True:
            try:
                return q.get(block=False)
            except queue.Empty:
                # Worker not done yet; yield to the event loop and retry.
                await asyncio.sleep(0.1)
    finally:
        del thread
def make_headers(count, header=None):
    """Build `count` chained header-only blocks.

    The chain starts after `header` when given, otherwise after the
    initial block hash. The merkle root is a synthetic per-index hash
    tweaked by the starting hash.
    """
    last_hash = HASH_INITIAL_BLOCK if header is None else header.hash()
    tweak = last_hash  # fixed tweak: captured once, before the loop
    headers = []
    for i in range(count):
        blk = Block(version=1,
                    previous_block_hash=last_hash,
                    merkle_root=make_hash(i, tweak),
                    timestamp=GENESIS_TIME + i * 600,
                    difficulty=DEFAULT_DIFFICULTY,
                    nonce=i * 137)
        headers.append(blk)
        last_hash = blk.hash()
    return headers
async def getblock(self, blockhash: str, mode: int=1):
    """Return a block by hash: a verbose dict when mode == 1, raw hex
    otherwise. mode == 2 (verbose with decoded txs) is not implemented.
    Returns None for an unknown hash."""
    if mode == 2:
        raise NotImplementedError
    block_header = self.repository.headers.get_block_header(blockhash)
    if not block_header:
        return
    block = await self._get_block(block_header)
    if mode == 1:
        # NOTE(review): dict.get(key, default) evaluates the default eagerly,
        # so block_bytes is parsed even when block_object is present — confirm
        # whether that cost is acceptable.
        block_object = block.get('block_object', Block.parse(io.BytesIO(block['block_bytes'])))
        best_header = self.repository.headers.get_best_header()
        # NOTE(review): bitcoind reports best - height + 1 confirmations; the
        # missing +1 here looks like an off-by-one — confirm against callers.
        block['confirmations'] = best_header['block_height'] - block_header['block_height']
        serialized = self._serialize_header(block_header)
        serialized['tx'] = [tx.id() for tx in block_object.txs]
        del block  # release the (possibly large) block dict before returning
        return serialized
    bb = block['block_bytes']
    del block  # release the (possibly large) block dict before returning
    return binascii.hexlify(bb).decode()
def block_by_hash(digest):
    """
    Get a block by hash.

    Args:
        digest: the hash of the block.
    Returns:
        A Block object.
    Raises:
        requests.HTTPError: if blockchain.info returns an error status.

    Fix: the original cached whatever body came back from blockchain.info
    (e.g. an HTML error page on 404/5xx) as block data; the response status
    is now checked before decoding and caching.
    """
    raw_block = get_block(digest)
    if raw_block is None:
        response = requests.get(
            'https://blockchain.info/rawblock/%s?format=hex' % digest)
        # Don't cache an error page as block bytes.
        response.raise_for_status()
        raw_block = unhexlify(response.text)
        put_block(digest, raw_block)
    block = Block.parse(io.BytesIO(raw_block))
    return block
def _get_block_with_tx(self):
    """Return a minimal test block containing one fixed segwit transaction."""
    from pycoin.tx.Tx import Tx
    # Fixed raw transaction fixture.
    raw_tx_hex = (
        '01000000000101112a649fd72656cf572259cb7cb61bd31ccdbdf0944070e73401565affbe629d0100000000ffffffff02608'
        'de2110000000017a914d52b516c1a094462959ed6facebb94429d2cebf487d3135b0b00000000220020701a8d401c84fb13e6'
        'baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d0400473044022006b149e0cf031f57fd443bd1210b381e9b1b15094'
        '57ba1f49e48b803696f56e802203d66bd974ad3ac5b7591cc84e706b78d139c61e2bf1995a89c4dc0758984a2b70148304502'
        '2100fe7275d601080e1870517774a3ad6accaa7f8ad144addec3251e98685d4fefad02207792c2b0ed6ab42ed2ba6d12e6bd3'
        '4db8c6f4ac6f15e604f70ea85a735c450b1016952210375e00eb72e29da82b89367947f29ef34afb75e8654f6ea368e0acdfd'
        '92976b7c2103a1b26313f430c4b15bb1fdce663207659d8cac749a0e53d70eff01874496feff2103c96d495bfdd5ba4145e3e'
        '046fee45e84a8a48ad05bd8dbb395c011a32cf9f88053ae00000000')
    transaction = Tx.from_hex(raw_tx_hex)
    test_block = Block(
        1,
        b'0' * 32,
        merkle_root=merkle([transaction.hash()]),
        timestamp=123456789,
        difficulty=3000000,
        nonce=1 * 137,
    )
    test_block.txs.append(transaction)
    return test_block
def locked_blocks_iterator(blockfile, start_info=(0, 0), cached_headers=50, batch_size=50):
    """
    This method loads blocks from disk, skipping any orphan blocks.

    Headers are fed into a BlockChain in batches of `batch_size`; once a
    header is `cached_headers` deep in the longest chain it is considered
    reorg-safe, given a sequential `index`, yielded, and locked in.
    """
    f = blockfile
    current_state = []

    def change_state(bc, ops):
        # Mirror the BlockChain's add/remove ops into current_state so the
        # longest-chain headers can be yielded in order.
        for op, bh, work in ops:
            if op == 'add':
                current_state.append(bh)
                pass
            else:
                current_state.pop()

    bc = BlockChain()
    bc.add_change_callback(change_state)
    bhs = []
    index = 0
    info_offset = start_info
    while 1:
        v = blockfile.next_offset(info_offset)
        if v is None:
            break
        block_offset, info_offset = v
        f.jump_to(block_offset)
        bh = Block.parse_as_header(f)
        bh.info = block_offset  # remember where this header lives on disk
        bhs.append(bh)
        if len(bhs) > batch_size:
            bc.add_headers(bhs)
            bhs = []
        if len(current_state) > cached_headers:
            # Yield the headers that are now safely buried and lock them in.
            for bh in current_state[:cached_headers]:
                bh.index = index
                yield bh
                index += 1
            bc.lock_to_index(index)
            current_state = current_state[cached_headers:]
    # NOTE(review): any headers left in `bhs` after the loop are never passed
    # to add_headers, and the remaining `current_state` entries are never
    # yielded — confirm whether dropping the tail of the file is intentional.
def save_block(self, block: Dict, tracker=None) -> Dict:
    """Persist a block: each transaction individually plus a block index.

    Expects 'block_bytes' and 'block_hash' in `block`; fills in 'size'
    and (if missing) 'block_object'. Returns the mutated block dict.

    Fix: the original `block.get('block_object', Block.from_bin(...))`
    evaluated the default eagerly, re-parsing block_bytes even when a
    block_object was already supplied; parsing now happens only on demand.
    """
    block['size'] = len(block['block_bytes'])
    if block.get('block_object') is None:
        block['block_object'] = Block.from_bin(block['block_bytes'])
    blockhash = binascii.unhexlify(block['block_hash'].encode())
    txids = list()
    for transaction in block['block_object'].txs:
        self.save_transaction({
            'txid': transaction.id(),
            'transaction_bytes': transaction.as_bin(),
            'block_hash': blockhash
        })
        txids.append(binascii.unhexlify(transaction.id()))
    self._save_block_index(blockhash, block['size'], txids)
    # Optionally record how much space this block's index consumes.
    tracker and tracker.track(
        self.get_key(block['block_hash'], prefix=BLOCK_INDEX_PREFIX),
        len(block['block_bytes']))
    return block
def main():
    """
    Method to retrieve Bitcoin blocks from downloaded files
    and print their headers, transactions, inputs and outputs.
    """
    b = BlockChain()
    print(str(b.last_block_hash()))
    # NOTE(review): hard-coded local Windows data directory.
    bf = Blockfiles(base_dir="E:\\Masterarbeit\\btc_node")
    print(bf._path_for_file_index())
    block = Block.parse_as_header(bf)
    i = -1
    for block in locked_blocks_iterator(start_info=(0, 0),
                                        base_dir="E:\\Masterarbeit\\btc_node"):
        i = i + 1
        #if i == 100000:
        print("Block")
        print(block.version)
        print(block.previous_block_hash)
        print(block.merkle_root)
        print(block.timestamp)
        print(block.difficulty)
        print(block.nonce)
        for tx in block.txs:
            #mytx = Tx(tx)
            #print(mytx.txs_in)
            print("Tx")
            print(tx.version)
            print(tx.lock_time)
            print(tx.unspents)
            for tx_in in tx.txs_in:
                print(tx_in.address())
                print("Input")
                #print(str(tx_in.address))
                print(str(tx_in.previous_hash))
                print(str(tx_in.previous_index))
                print(str(tx_in.script))
                print(str(tx_in.sequence))
                print(str(tx_in.witness))
            for tx_out in tx.txs_out:
                print("Output")
                print(tx_out.address())
                print(str(tx_out.coin_value))
                print(str(tx_out.script))
def test_block(self):
    """Parse a serialized block, verify its hash, print its contents, and
    validate the merkle root."""
    expected_checksum = '0000000000089F7910F6755C10EA2795EC368A29B435D80770AD78493A6FECF1'.lower()
    # Serialized block fixture (header + 3 transactions).
    block_data = h2b(
        "010000007480150B299A16BBCE5CCDB1D1BBC65CFC5893B01E6619107C552000000000"
        "007900A2B203D24C69710AB6A94BEB937E1B1ADD64C2327E268D8C3E5F8B41DBED8796"
        "974CED66471B204C324703010000000100000000000000000000000000000000000000"
        "00000000000000000000000000FFFFFFFF0804ED66471B024001FFFFFFFF0100F2052A"
        "010000004341045FEE68BAB9915C4EDCA4C680420ED28BBC369ED84D48AC178E1F5F7E"
        "EAC455BBE270DABA06802145854B5E29F0A7F816E2DF906E0FE4F6D5B4C9B92940E4F0"
        "EDAC000000000100000001F7B30415D1A7BF6DB91CB2A272767C6799D721A4178AA328"
        "E0D77C199CB3B57F010000008A4730440220556F61B84F16E637836D2E74B8CB784DE4"
        "0C28FE3EF93CCB7406504EE9C7CAA5022043BD4749D4F3F7F831AC696748AD8D8E79AE"
        "B4A1C539E742AA3256910FC88E170141049A414D94345712893A828DE57B4C2054E2F5"
        "96CDCA9D0B4451BA1CA5F8847830B9BE6E196450E6ABB21C540EA31BE310271AA00A49"
        "ED0BA930743D1ED465BAD0FFFFFFFF0200E1F505000000001976A914529A63393D63E9"
        "80ACE6FA885C5A89E4F27AA08988ACC0ADA41A000000001976A9145D17976537F30886"
        "5ED533CCCFDD76558CA3C8F088AC00000000010000000165148D894D3922EF5FFDA962"
        "BE26016635C933D470C8B0AB7618E869E3F70E3C000000008B48304502207F5779EBF4"
        "834FEAEFF4D250898324EB5C0833B16D7AF4C1CB0F66F50FCF6E85022100B78A65377F"
        "D018281E77285EFC31E5B9BA7CB7E20E015CF6B7FA3E4A466DD195014104072AD79E0A"
        "A38C05FA33DD185F84C17F611E58A8658CE996D8B04395B99C7BE36529CAB7606900A0"
        "CD5A7AEBC6B233EA8E0FE60943054C63620E05E5B85F0426FFFFFFFF02404B4C000000"
        "00001976A914D4CAA8447532CA8EE4C80A1AE1D230A01E22BFDB88AC8013A0DE010000"
        "001976A9149661A79AE1F6D487AF3420C13E649D6DF3747FC288AC00000000")
    # try to parse a block
    block = Block.parse(io.BytesIO(block_data))
    print(block)
    assert b2h_rev(block.hash()) == expected_checksum
    # Dump every transaction with its inputs and outputs.
    for tx in block.txs:
        print(tx)
        for t in tx.txs_in:
            print("  %s" % t)
        for t in tx.txs_out:
            print("  %s" % t)
    block.check_merkle_hash()
def create_bitcoinish_network(**kwargs):
    """Assemble a Network object for a bitcoin-like coin from kwargs."""
    # potential kwargs:
    #   netcode, network_name, subnet_name, tx, block, magic_header_hex, default_port, dns_bootstrap,
    #   wif_prefix_hex, address_prefix_hex, pay_to_script_prefix_hex
    #   bip32_prv_prefix_hex, bip32_pub_prefix_hex, sec_prefix, scriptTools
    kwargs.setdefault("sec_prefix", "%sSEC" % kwargs["netcode"].upper())
    # Convert any "*_hex" kwargs into their binary equivalents.
    KEYS_TO_H2B = (
        "bip32_prv_prefix bip32_pub_prefix wif_prefix address_prefix "
        "pay_to_script_prefix sec_prefix magic_header").split()
    for k in KEYS_TO_H2B:
        k_hex = "%s_hex" % k
        if k_hex in kwargs:
            kwargs[k] = h2b(kwargs[k_hex])
    scriptTools = kwargs.get("scriptTools", BitcoinScriptTools)
    _script_info = ScriptInfo(scriptTools)
    # UI receives only the prefix-related kwargs.
    UI_KEYS = ("bip32_prv_prefix bip32_pub_prefix wif_prefix sec_prefix "
               "address_prefix pay_to_script_prefix bech32_hrp").split()
    ui_kwargs = {k: kwargs[k] for k in UI_KEYS if k in kwargs}
    ui = UI(_script_info, kwargs.get("generator", secp256k1_generator), **ui_kwargs)
    extras = Extras(scriptTools, ui)
    kwargs["ui"] = ui
    kwargs["extras"] = extras
    kwargs.setdefault("tx", Tx)
    kwargs.setdefault("block", Block.make_subclass(kwargs["tx"]))
    # Forward the network-level kwargs to the Network constructor.
    NETWORK_KEYS = ("network_name subnet_name tx block ui extras "
                    "dns_bootstrap default_port magic_header").split()
    network_kwargs = {k: kwargs.get(k) for k in NETWORK_KEYS if k in kwargs}
    network_kwargs["code"] = kwargs["netcode"]  # BRAIN DAMAGE
    network = Network(**network_kwargs)
    # Wire up the p2p message parser/packer for this network's tx/block types.
    streamer = standard_streamer(
        standard_parsing_functions(network.block, network.tx))
    network.parse_message, network.pack_message = make_parser_and_packer(
        streamer, standard_messages(), standard_message_post_unpacks(streamer))
    return network
def make_blocks(count, nonce_base=30000, previous_block_hash=HASH_INITIAL_BLOCK):
    """Build `count` chained test blocks with real coinbase + filler txs.

    Each block's merkle root is computed from its transactions, and the
    nonce is ground until the last hash byte equals the height (mod 256).
    """
    chain = []
    parent_hash = previous_block_hash
    for height in range(count):
        base = height * nonce_base
        txs = [coinbase_tx(height + 1)] + [make_tx(n) for n in range(base, base + 8)]
        root = merkle([tx.hash() for tx in txs])
        nonce = base
        wanted = height & 0xff
        while True:
            blk = Block(version=1,
                        previous_block_hash=parent_hash,
                        merkle_root=root,
                        timestamp=GENESIS_TIME + height * 600,
                        difficulty=height,
                        nonce=nonce)
            blk.set_txs(txs)
            if blk.hash()[-1] == wanted:
                break
            nonce += 1
        chain.append(blk)
        parent_hash = blk.hash()
    return chain
'02201FA9D6EE7A1763580E342474FC1AEF59B0468F98479953437F525063E25675DE014104A01F763CFBF5E518'\ 'C628939158AF3DC0CAAC35C4BA7BC1CE8B7E634E8CDC44E15F0296B250282BD649BAA8398D199F2424FCDCD88'\ 'D3A9ED186E4FD3CB9BF57CFFFFFFFFF02404B4C00000000001976A9148156FF75BEF24B35ACCE3C05289A241'\ '1E1B0E57988AC00AA38DF010000001976A914BC7E692A5FFE95A596712F5ED83393B3002E452E88AC000000'\ '0001000000019C97AFDF6C9A31FFA86D71EA79A079001E2B59EE408FD418498219400639AC0A010000008B4'\ '830450220363CFFAE09599397B21E6D8A8073FB1DFBE06B6ACDD0F2F7D3FEA86CA9C3F605022100FA255A6ED'\ '23FD825C759EF1A885A31CAD0989606CA8A3A16657D50FE3CEF5828014104FF444BAC08308B9EC97F56A652A'\ 'D8866E0BA804DA97868909999566CB377F4A2C8F1000E83B496868F3A282E1A34DF78565B65C15C3FA21A076'\ '3FD81A3DFBBB6FFFFFFFF02C05EECDE010000001976A914588554E6CC64E7343D77117DA7E01357A6111B798'\ '8AC404B4C00000000001976A914CA6EB218592F289999F13916EE32829AD587DBC588AC00000000010000000'\ '1BEF5C9225CB9FE3DEF929423FA36AAD9980B9D6F8F3070001ACF3A5FB389A69F000000004A493046022100F'\ 'B23B1E2F2FB8B96E04D220D385346290A9349F89BBBC5C225D5A56D931F8A8E022100F298EB28294B90C1BAF'\ '319DAB713E7CA721AAADD8FCC15F849DE7B0A6CF5412101FFFFFFFF0100F2052A010000001976A9146DDEA80'\ '71439951115469D0D2E2B80ECBCDD48DB88AC00000000') block_80971 = Block.parse(io.BytesIO(block_80971_data)) COINBASE_PUB_KEY_FROM_80971 = h2b("04cb6b6b4eadc96c7d08b21b29d0ada5f29f9378978cabdb602b8b65da08c8a93caab46"\ "f5abd59889bac704925942dd77a2116d10e0274cad944c71d3d1a670570") COINBASE_BYTES_FROM_80971 = h2b("04ed66471b02c301") def standard_tx(coins_from, coins_to): txs_in = [] unspents = [] for h, idx, tx_out in coins_from: txs_in.append(TxIn(h, idx)) unspents.append(tx_out) txs_out = [] for coin_value, bitcoin_address in coins_to:
def blockheader_for_offset_info(offset_info, base_dir=None):
    """Open the block files at `offset_info` and parse the header there.

    Returns the parsed header block.

    Fix: the file handle was leaked if Block.parse_as_header raised; it is
    now always closed via try/finally.
    """
    f = Blockfiles(base_dir, offset_info)
    try:
        return Block.parse_as_header(f)
    finally:
        f.close()
import argparse
from io import BytesIO
from binascii import unhexlify

from pycoin.block import Block

from .models import merge_nulldatas_from_block_obj
from .compatibility import bitcoind

if __name__ == "__main__":
    # Read the block hash from the command line (as sent by -blocknotify).
    parser = argparse.ArgumentParser()
    parser.add_argument('--block-hash', help='Notify of new block hash', type=str, required=True)
    args = parser.parse_args()
    block_hash = args.block_hash

    # Fetch the raw block (hex) and its JSON form from bitcoind.
    serialized_block = BytesIO(unhexlify(bitcoind.getblock(args.block_hash, False)))
    json_block = bitcoind.getblock(args.block_hash)

    # Parse the block and merge its nulldata outputs into the store.
    block = Block.parse(serialized_block)
    merge_nulldatas_from_block_obj(block, block_hash, json_block['height'], verbose=True)
def test_validate(self):
    """Validate real mainnet transaction signatures, then detect corruption.

    Parses two hard-coded serialized blocks (heights 80971 and 80974).  A
    transaction in block 80974 spends outputs from block 80971; the test
    checks it validates cleanly, then tampers with a spent output script
    and checks the bad signature is reported.
    """
    # block 80971
    block_80971_cs = h2b(
        '00000000001126456C67A1F5F0FF0268F53B4F22E0531DC70C7B69746AF69DAC')
    # Raw serialized block 80971 (header plus transactions), hex-encoded.
    block_80971_data = h2b('01000000950A1631FB9FAC411DFB173487B9E18018B7C6F7147E78C06258410000000000A881352F97F14B'\
        'F191B54915AE124E051B8FE6C3922C5082B34EAD503000FC34D891974CED66471B4016850A040100'\
        '0000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF080'\
        '4ED66471B02C301FFFFFFFF0100F2052A01000000434104CB6B6B4EADC96C7D08B21B29D0ADA5F29F937'\
        '8978CABDB602B8B65DA08C8A93CAAB46F5ABD59889BAC704925942DD77A2116D10E0274CAD944C71D3D1A'\
        '670570AC0000000001000000018C55ED829F16A4E43902940D3D33005264606D5F7D555B5F67EE4C033390'\
        'C2EB010000008A47304402202D1BF606648EDCDB124C1254930852D99188E1231715031CBEAEA80CCFD2B39A'\
        '02201FA9D6EE7A1763580E342474FC1AEF59B0468F98479953437F525063E25675DE014104A01F763CFBF5E518'\
        'C628939158AF3DC0CAAC35C4BA7BC1CE8B7E634E8CDC44E15F0296B250282BD649BAA8398D199F2424FCDCD88'\
        'D3A9ED186E4FD3CB9BF57CFFFFFFFFF02404B4C00000000001976A9148156FF75BEF24B35ACCE3C05289A241'\
        '1E1B0E57988AC00AA38DF010000001976A914BC7E692A5FFE95A596712F5ED83393B3002E452E88AC000000'\
        '0001000000019C97AFDF6C9A31FFA86D71EA79A079001E2B59EE408FD418498219400639AC0A010000008B4'\
        '830450220363CFFAE09599397B21E6D8A8073FB1DFBE06B6ACDD0F2F7D3FEA86CA9C3F605022100FA255A6ED'\
        '23FD825C759EF1A885A31CAD0989606CA8A3A16657D50FE3CEF5828014104FF444BAC08308B9EC97F56A652A'\
        'D8866E0BA804DA97868909999566CB377F4A2C8F1000E83B496868F3A282E1A34DF78565B65C15C3FA21A076'\
        '3FD81A3DFBBB6FFFFFFFF02C05EECDE010000001976A914588554E6CC64E7343D77117DA7E01357A6111B798'\
        '8AC404B4C00000000001976A914CA6EB218592F289999F13916EE32829AD587DBC588AC00000000010000000'\
        '1BEF5C9225CB9FE3DEF929423FA36AAD9980B9D6F8F3070001ACF3A5FB389A69F000000004A493046022100F'\
        'B23B1E2F2FB8B96E04D220D385346290A9349F89BBBC5C225D5A56D931F8A8E022100F298EB28294B90C1BAF'\
        '319DAB713E7CA721AAADD8FCC15F849DE7B0A6CF5412101FFFFFFFF0100F2052A010000001976A9146DDEA80'\
        '71439951115469D0D2E2B80ECBCDD48DB88AC00000000')
    # block 80974
    block_80974_cs = h2b(
        '0000000000089F7910F6755C10EA2795EC368A29B435D80770AD78493A6FECF1')
    # Raw serialized block 80974, hex-encoded.
    block_80974_data = h2b('010000007480150B299A16BBCE5CCDB1D1BBC65CFC5893B01E6619107C55200000000000790'\
        '0A2B203D24C69710AB6A94BEB937E1B1ADD64C2327E268D8C3E5F8B41DBED8796974CED66471B204C3247030'\
        '1000000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF0804ED6'\
        '6471B024001FFFFFFFF0100F2052A010000004341045FEE68BAB9915C4EDCA4C680420ED28BBC369ED84D48A'\
        'C178E1F5F7EEAC455BBE270DABA06802145854B5E29F0A7F816E2DF906E0FE4F6D5B4C9B92940E4F0EDAC000'\
        '000000100000001F7B30415D1A7BF6DB91CB2A272767C6799D721A4178AA328E0D77C199CB3B57F010000008'\
        'A4730440220556F61B84F16E637836D2E74B8CB784DE40C28FE3EF93CCB7406504EE9C7CAA5022043BD4749D'\
        '4F3F7F831AC696748AD8D8E79AEB4A1C539E742AA3256910FC88E170141049A414D94345712893A828DE57B4C'\
        '2054E2F596CDCA9D0B4451BA1CA5F8847830B9BE6E196450E6ABB21C540EA31BE310271AA00A49ED0BA930743'\
        'D1ED465BAD0FFFFFFFF0200E1F505000000001976A914529A63393D63E980ACE6FA885C5A89E4F27AA08988AC'\
        'C0ADA41A000000001976A9145D17976537F308865ED533CCCFDD76558CA3C8F088AC000000000100000001651'\
        '48D894D3922EF5FFDA962BE26016635C933D470C8B0AB7618E869E3F70E3C000000008B48304502207F5779EB'\
        'F4834FEAEFF4D250898324EB5C0833B16D7AF4C1CB0F66F50FCF6E85022100B78A65377FD018281E77285EFC3'\
        '1E5B9BA7CB7E20E015CF6B7FA3E4A466DD195014104072AD79E0AA38C05FA33DD185F84C17F611E58A8658CE'\
        '996D8B04395B99C7BE36529CAB7606900A0CD5A7AEBC6B233EA8E0FE60943054C63620E05E5B85F0426FFFFF'\
        'FFF02404B4C00000000001976A914D4CAA8447532CA8EE4C80A1AE1D230A01E22BFDB88AC8013A0DE0100000'\
        '01976A9149661A79AE1F6D487AF3420C13E649D6DF3747FC288AC00000000')
    block_80971 = Block.parse(io.BytesIO(block_80971_data))
    block_80974 = Block.parse(io.BytesIO(block_80974_data))
    # Look up spent outputs by transaction hash from the earlier block.
    tx_db = {tx.hash(): tx for tx in block_80971.txs}
    tx_to_validate = block_80974.txs[2]
    self.assertEqual(
        "OP_DUP OP_HASH160 [d4caa8447532ca8ee4c80a1ae1d230a01e22bfdb] OP_EQUALVERIFY OP_CHECKSIG",
        tools.disassemble(tx_to_validate.txs_out[0].script))
    self.assertEqual(
        tx_to_validate.id(),
        "7c4f5385050c18aa8df2ba50da566bbab68635999cc99b75124863da1594195b")
    tx_to_validate.unspents_from_db(tx_db)
    # All signatures check out on the untouched transaction.
    self.assertEqual(tx_to_validate.bad_signature_count(), 0)
    # now, let's corrupt the Tx and see what happens
    tx_out = tx_to_validate.txs_out[1]
    disassembly = tools.disassemble(tx_out.script)
    # Round-tripping the script through disassemble/compile unchanged must
    # still validate.
    tx_out.script = tools.compile(disassembly)
    self.assertEqual(tx_to_validate.bad_signature_count(), 0)
    # Flip the last nibble of the output's hash160 — the signature over the
    # original script no longer matches.
    disassembly = disassembly.replace(
        "9661a79ae1f6d487af3420c13e649d6df3747fc2",
        "9661a79ae1f6d487af3420c13e649d6df3747fc3")
    tx_out.script = tools.compile(disassembly)
    self.assertEqual(tx_to_validate.bad_signature_count(), 1)
    self.assertFalse(tx_to_validate.is_signature_ok(0))
def test_validate(self):
    """Validate mainnet transaction signatures via ``Tx.validate``.

    Same fixture as the companion test: parses blocks 80971 and 80974 from
    hard-coded bytes, validates a transaction in 80974 that spends outputs
    of 80971 using a lookup callback, then corrupts the spent output script
    and checks that ``validate`` raises ``ValidationFailureError`` with the
    expected message.
    """
    # block 80971
    block_80971_cs = h2b("00000000001126456C67A1F5F0FF0268F53B4F22E0531DC70C7B69746AF69DAC")
    # Raw serialized block 80971 (header plus transactions), hex-encoded.
    block_80971_data = h2b(
        "01000000950A1631FB9FAC411DFB173487B9E18018B7C6F7147E78C06258410000000000A881352F97F14B"
        "F191B54915AE124E051B8FE6C3922C5082B34EAD503000FC34D891974CED66471B4016850A040100"
        "0000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF080"
        "4ED66471B02C301FFFFFFFF0100F2052A01000000434104CB6B6B4EADC96C7D08B21B29D0ADA5F29F937"
        "8978CABDB602B8B65DA08C8A93CAAB46F5ABD59889BAC704925942DD77A2116D10E0274CAD944C71D3D1A"
        "670570AC0000000001000000018C55ED829F16A4E43902940D3D33005264606D5F7D555B5F67EE4C033390"
        "C2EB010000008A47304402202D1BF606648EDCDB124C1254930852D99188E1231715031CBEAEA80CCFD2B39A"
        "02201FA9D6EE7A1763580E342474FC1AEF59B0468F98479953437F525063E25675DE014104A01F763CFBF5E518"
        "C628939158AF3DC0CAAC35C4BA7BC1CE8B7E634E8CDC44E15F0296B250282BD649BAA8398D199F2424FCDCD88"
        "D3A9ED186E4FD3CB9BF57CFFFFFFFFF02404B4C00000000001976A9148156FF75BEF24B35ACCE3C05289A241"
        "1E1B0E57988AC00AA38DF010000001976A914BC7E692A5FFE95A596712F5ED83393B3002E452E88AC000000"
        "0001000000019C97AFDF6C9A31FFA86D71EA79A079001E2B59EE408FD418498219400639AC0A010000008B4"
        "830450220363CFFAE09599397B21E6D8A8073FB1DFBE06B6ACDD0F2F7D3FEA86CA9C3F605022100FA255A6ED"
        "23FD825C759EF1A885A31CAD0989606CA8A3A16657D50FE3CEF5828014104FF444BAC08308B9EC97F56A652A"
        "D8866E0BA804DA97868909999566CB377F4A2C8F1000E83B496868F3A282E1A34DF78565B65C15C3FA21A076"
        "3FD81A3DFBBB6FFFFFFFF02C05EECDE010000001976A914588554E6CC64E7343D77117DA7E01357A6111B798"
        "8AC404B4C00000000001976A914CA6EB218592F289999F13916EE32829AD587DBC588AC00000000010000000"
        "1BEF5C9225CB9FE3DEF929423FA36AAD9980B9D6F8F3070001ACF3A5FB389A69F000000004A493046022100F"
        "B23B1E2F2FB8B96E04D220D385346290A9349F89BBBC5C225D5A56D931F8A8E022100F298EB28294B90C1BAF"
        "319DAB713E7CA721AAADD8FCC15F849DE7B0A6CF5412101FFFFFFFF0100F2052A010000001976A9146DDEA80"
        "71439951115469D0D2E2B80ECBCDD48DB88AC00000000"
    )
    # block 80974
    block_80974_cs = h2b("0000000000089F7910F6755C10EA2795EC368A29B435D80770AD78493A6FECF1")
    # Raw serialized block 80974, hex-encoded.
    block_80974_data = h2b(
        "010000007480150B299A16BBCE5CCDB1D1BBC65CFC5893B01E6619107C55200000000000790"
        "0A2B203D24C69710AB6A94BEB937E1B1ADD64C2327E268D8C3E5F8B41DBED8796974CED66471B204C3247030"
        "1000000010000000000000000000000000000000000000000000000000000000000000000FFFFFFFF0804ED6"
        "6471B024001FFFFFFFF0100F2052A010000004341045FEE68BAB9915C4EDCA4C680420ED28BBC369ED84D48A"
        "C178E1F5F7EEAC455BBE270DABA06802145854B5E29F0A7F816E2DF906E0FE4F6D5B4C9B92940E4F0EDAC000"
        "000000100000001F7B30415D1A7BF6DB91CB2A272767C6799D721A4178AA328E0D77C199CB3B57F010000008"
        "A4730440220556F61B84F16E637836D2E74B8CB784DE40C28FE3EF93CCB7406504EE9C7CAA5022043BD4749D"
        "4F3F7F831AC696748AD8D8E79AEB4A1C539E742AA3256910FC88E170141049A414D94345712893A828DE57B4C"
        "2054E2F596CDCA9D0B4451BA1CA5F8847830B9BE6E196450E6ABB21C540EA31BE310271AA00A49ED0BA930743"
        "D1ED465BAD0FFFFFFFF0200E1F505000000001976A914529A63393D63E980ACE6FA885C5A89E4F27AA08988AC"
        "C0ADA41A000000001976A9145D17976537F308865ED533CCCFDD76558CA3C8F088AC000000000100000001651"
        "48D894D3922EF5FFDA962BE26016635C933D470C8B0AB7618E869E3F70E3C000000008B48304502207F5779EB"
        "F4834FEAEFF4D250898324EB5C0833B16D7AF4C1CB0F66F50FCF6E85022100B78A65377FD018281E77285EFC3"
        "1E5B9BA7CB7E20E015CF6B7FA3E4A466DD195014104072AD79E0AA38C05FA33DD185F84C17F611E58A8658CE"
        "996D8B04395B99C7BE36529CAB7606900A0CD5A7AEBC6B233EA8E0FE60943054C63620E05E5B85F0426FFFFF"
        "FFF02404B4C00000000001976A914D4CAA8447532CA8EE4C80A1AE1D230A01E22BFDB88AC8013A0DE0100000"
        "01976A9149661A79AE1F6D487AF3420C13E649D6DF3747FC288AC00000000"
    )
    block_80971 = Block.parse(io.BytesIO(block_80971_data))
    block_80974 = Block.parse(io.BytesIO(block_80974_data))
    # Index the earlier block's transactions by hash for unspent lookup.
    tx_db = {tx.hash(): tx for tx in block_80971.txs}

    def tx_out_for_hash_index_f(tx_hash, tx_out_idx):
        # Callback handed to Tx.validate: resolve a (tx_hash, index) pair to
        # the TxOut being spent.
        tx = tx_db.get(tx_hash)
        return tx.txs_out[tx_out_idx]

    tx_to_validate = block_80974.txs[2]
    self.assertEqual(
        "OP_DUP OP_HASH160 d4caa8447532ca8ee4c80a1ae1d230a01e22bfdb OP_EQUALVERIFY OP_CHECKSIG",
        tools.disassemble(tx_to_validate.txs_out[0].script),
    )
    self.assertEqual(tx_to_validate.id(), "7c4f5385050c18aa8df2ba50da566bbab68635999cc99b75124863da1594195b")
    # The untouched transaction must validate without raising.
    tx_to_validate.validate(tx_out_for_hash_index_f)
    # now, let's corrupt the Tx and see what happens
    tx_out = tx_to_validate.txs_out[1]
    disassembly = tools.disassemble(tx_out.script)
    # Round-tripping the script through disassemble/compile unchanged must
    # still validate.
    tx_out.script = tools.compile(disassembly)
    tx_to_validate.validate(tx_out_for_hash_index_f)
    # Flip the last nibble of the hash160 in the output script; validation
    # must now fail.
    disassembly = disassembly.replace(
        "9661a79ae1f6d487af3420c13e649d6df3747fc2", "9661a79ae1f6d487af3420c13e649d6df3747fc3"
    )
    tx_out.script = tools.compile(disassembly)
    with self.assertRaises(ValidationFailureError) as cm:
        tx_to_validate.validate(tx_out_for_hash_index_f)
    exception = cm.exception
    self.assertEqual(
        exception.args[0],
        "Tx 3c0ef7e369e81876abb0c870d433c935660126be62a9fd5fef22394d898d1465 TxIn index 0 script did not verify",
    )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Compute the x13bcd proof-of-work hash of a hard-coded raw block and print
it as a little-endian 256-bit integer.

Ported from Python 2 (``0L`` literal, ``xrange``, ``print`` statement,
``str.decode('hex')``) to Python 3; behavior is otherwise unchanged.
"""
from pycoin.block import Block
import io, struct
import x13bcd_hash


def uint256_from_str(s):
    """Interpret the first 32 bytes of *s* as a little-endian unsigned integer.

    :param s: bytes-like object at least 32 bytes long.
    :return: int built from eight little-endian 32-bit words.
    """
    r = 0
    t = struct.unpack("<IIIIIIII", s[:32])
    for i in range(8):
        # Word i contributes at bit offset 32*i (little-endian word order).
        r += t[i] << (i * 32)
    return r


# Raw serialized block, hex-encoded (header followed by transactions).
rawblock = '00000060b38cb32058b774ea735c0f6cfaf3fb0153a03496be87208219717f4e4ad744ee5dc71fd1295bfe406e03e23877b2b9a64be33f2a2f1e05a238ef6fdf04bb574ca24ec05b6394001b00cfc14c02010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff4c0335350804a24ec05b08fabe6d6d00000000000003600000000000003960000039da4b3161e0000039da4b3161c8010000000000000030d6c52e000000000d2f6e6f64655374726174756d2f00000000020000000000000000266a24aa21a9edd76d419006b113123076afec5868f6b6e3dfde054e82adf4a9693d2a1cc58b256880814a000000001976a914829a4b1a792e9e1514ae902d622aed18a99601b088ac01200000000000000000000000000000000000000000000000000000000000000000000000000c000000d13244a2054b9b33808e72daabd3d9a7c5685b10abd7a4b932f273dd498f131601d4063756d72c0f08af818741b198c97ecf1ae065c464a0ef08ea7c4f0679089d010000006a4730440220243539823fb5fe8fdb8991dda0e5f7e7cf8e3beee36e2dbce6b905e58d142373022058996510c2edf2fbbe77f77016cbfafbe53cc4505e58de05083194af714c50000121021e6a4b6673be9f25e54f4028ebf8459bd2d0726d836dbcee8cecaa856bec458bffffffff0280778e06000000001976a91481b2e127d9c7fec0fbc6b1a1784ba421acf3889d88ac67ccd617000000001976a9145f2bc24285493cafc8ef44b35a5ffe63c5eab5ce88ac00000000'
# Only the header portion is parsed; the PoW hash is taken over its binary form.
b = Block.parse_as_header(io.BytesIO(bytes.fromhex(rawblock)))
hash_bin = x13bcd_hash.getPoWHash(b.as_bin())
print(uint256_from_str(hash_bin))
'02201FA9D6EE7A1763580E342474FC1AEF59B0468F98479953437F525063E25675DE014104A01F763CFBF5E518'\ 'C628939158AF3DC0CAAC35C4BA7BC1CE8B7E634E8CDC44E15F0296B250282BD649BAA8398D199F2424FCDCD88'\ 'D3A9ED186E4FD3CB9BF57CFFFFFFFFF02404B4C00000000001976A9148156FF75BEF24B35ACCE3C05289A241'\ '1E1B0E57988AC00AA38DF010000001976A914BC7E692A5FFE95A596712F5ED83393B3002E452E88AC000000'\ '0001000000019C97AFDF6C9A31FFA86D71EA79A079001E2B59EE408FD418498219400639AC0A010000008B4'\ '830450220363CFFAE09599397B21E6D8A8073FB1DFBE06B6ACDD0F2F7D3FEA86CA9C3F605022100FA255A6ED'\ '23FD825C759EF1A885A31CAD0989606CA8A3A16657D50FE3CEF5828014104FF444BAC08308B9EC97F56A652A'\ 'D8866E0BA804DA97868909999566CB377F4A2C8F1000E83B496868F3A282E1A34DF78565B65C15C3FA21A076'\ '3FD81A3DFBBB6FFFFFFFF02C05EECDE010000001976A914588554E6CC64E7343D77117DA7E01357A6111B798'\ '8AC404B4C00000000001976A914CA6EB218592F289999F13916EE32829AD587DBC588AC00000000010000000'\ '1BEF5C9225CB9FE3DEF929423FA36AAD9980B9D6F8F3070001ACF3A5FB389A69F000000004A493046022100F'\ 'B23B1E2F2FB8B96E04D220D385346290A9349F89BBBC5C225D5A56D931F8A8E022100F298EB28294B90C1BAF'\ '319DAB713E7CA721AAADD8FCC15F849DE7B0A6CF5412101FFFFFFFF0100F2052A010000001976A9146DDEA80'\ '71439951115469D0D2E2B80ECBCDD48DB88AC00000000'); block_80971 = Block.parse(io.BytesIO(block_80971_data)) COINBASE_PUB_KEY_FROM_80971 = h2b("04cb6b6b4eadc96c7d08b21b29d0ada5f29f9378978cabdb602b8b65da08c8a93caab46"\ "f5abd59889bac704925942dd77a2116d10e0274cad944c71d3d1a670570") COINBASE_BYTES_FROM_80971 = h2b("04ed66471b02c301") def standard_tx(coins_from, coins_to): txs_in = [] unspents = [] for h, idx, tx_out in coins_from: txs_in.append(TxIn(h, idx)) unspents.append(tx_out) txs_out = [] for coin_value, bitcoin_address in coins_to: txs_out.append(TxOut(coin_value, standard_tx_out_script(bitcoin_address)))
def block(args, parser):
    """Parse each file in ``args.block_file`` as a block and dump it.

    A blank line is printed after each dumped block.  *parser* is accepted
    for signature compatibility with the command dispatcher but not used.
    """
    for block_file in args.block_file:
        parsed = Block.parse(block_file)
        dump_block(parsed)
        print('')
def getblock(data, q):
    """Deserialize raw block bytes and hand the result over via a queue.

    :param data: serialized block, in the form accepted by ``Block.from_bin``.
    :param q: queue-like object with a ``put`` method; receives the parsed block.
    """
    parsed_block = Block.from_bin(data)
    q.put(parsed_block)
#!/usr/bin/env python3
"""Fetch a block from a local bitcoind by hash, run initial processing on
every transaction in it, and record the block hash as seen.  Exits silently
if the block was already processed."""
import argparse
import json
import sys
import io
from binascii import unhexlify, hexlify

from bitcoinrpc import connect_to_local
from pycoin.block import Block

from models import txs, known_txs, unprocessed_txs, addr_to_uid, Account, known_blocks, all_addresses
from wallet import process_tx_initial

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--blockhash')
options = arg_parser.parse_args()
target_hash = options.blockhash

# Skip blocks we have already handled.
if target_hash in known_blocks:
    sys.exit()

rpc = connect_to_local()
# verbose=False returns the block serialized as hex.
raw_hex = rpc.getblock(target_hash, False)
parsed_block = Block.parse(io.BytesIO(unhexlify(raw_hex)))
for transaction in parsed_block.txs:
    process_tx_initial(transaction)
known_blocks.add(target_hash)
def create_bitcoinish_network(symbol, network_name, subnet_name, **kwargs):
    """Build and return a fully wired ``Network`` object for a bitcoin-like coin.

    :param symbol: short coin symbol (e.g. "BTC"); also used as the prefix
        for generated key subclasses and the default SEC text prefix.
    :param network_name: human-readable network name.
    :param subnet_name: subnet name (e.g. mainnet/testnet distinction).
    :param kwargs: optional customization — see the comment below.
    :return: the populated ``Network`` instance.
    """
    # potential kwargs:
    #   tx, block, magic_header_hex, default_port, dns_bootstrap,
    #   wif_prefix_hex, address_prefix_hex, pay_to_script_prefix_hex
    #   bip32_prv_prefix_hex, bip32_pub_prefix_hex, sec_prefix, script_tools
    #   bip49_prv_prefix, bip49_pub_prefix, bip84_prv_prefix, bip84_pub_prefix
    network = Network(symbol, network_name, subnet_name)

    generator = kwargs.get("generator", secp256k1_generator)

    kwargs.setdefault("sec_prefix", "%sSEC" % symbol.upper())
    # Any "*_hex" kwarg is decoded to bytes and stored under the bare key.
    KEYS_TO_H2B = (
        "bip32_prv_prefix bip32_pub_prefix bip49_prv_prefix bip49_pub_prefix "
        "bip84_prv_prefix bip84_pub_prefix wif_prefix address_prefix "
        "pay_to_script_prefix sec_prefix magic_header").split()
    for k in KEYS_TO_H2B:
        k_hex = "%s_hex" % k
        if k_hex in kwargs:
            kwargs[k] = h2b(kwargs[k_hex])

    script_tools = kwargs.get("script_tools", BitcoinScriptTools)

    # Subset of kwargs forwarded to the UI-facing helpers (parse/address APIs).
    UI_KEYS = (
        "bip32_prv_prefix bip32_pub_prefix bip49_prv_prefix bip49_pub_prefix "
        "bip84_prv_prefix bip84_pub_prefix wif_prefix sec_prefix "
        "address_prefix pay_to_script_prefix bech32_hrp").split()
    ui_kwargs = {k: kwargs[k] for k in UI_KEYS if k in kwargs}

    _bip32_prv_prefix = ui_kwargs.get("bip32_prv_prefix")
    _bip32_pub_prefix = ui_kwargs.get("bip32_pub_prefix")
    _wif_prefix = ui_kwargs.get("wif_prefix")
    _sec_prefix = ui_kwargs.get("sec_prefix")

    # Serialization helpers; each closes over ui_kwargs / the prefixes above.
    def bip32_as_string(blob, as_private):
        prefix = ui_kwargs.get("bip32_%s_prefix" % ("prv" if as_private else "pub"))
        return b2a_hashed_base58(prefix + blob)

    def bip49_as_string(blob, as_private):
        prefix = ui_kwargs.get("bip49_%s_prefix" % ("prv" if as_private else "pub"))
        return b2a_hashed_base58(prefix + blob)

    def bip84_as_string(blob, as_private):
        prefix = ui_kwargs.get("bip84_%s_prefix" % ("prv" if as_private else "pub"))
        return b2a_hashed_base58(prefix + blob)

    def wif_for_blob(blob):
        return b2a_hashed_base58(_wif_prefix + blob)

    def sec_text_for_blob(blob):
        return _sec_prefix + b2h(blob)

    # Per-network subclasses of the generic key types.
    NetworkKey = Key.make_subclass(symbol, network=network, generator=generator)
    NetworkElectrumKey = ElectrumWallet.make_subclass(symbol, network=network, generator=generator)
    NetworkBIP32Node = BIP32Node.make_subclass(symbol, network=network, generator=generator)
    NetworkBIP49Node = BIP49Node.make_subclass(symbol, network=network, generator=generator)
    NetworkBIP84Node = BIP84Node.make_subclass(symbol, network=network, generator=generator)

    # Copy simple network-level settings straight onto the network object.
    NETWORK_KEYS = "network_name subnet_name dns_bootstrap default_port magic_header".split(
    )
    for k in NETWORK_KEYS:
        if k in kwargs:
            setattr(network, k, kwargs[k])

    # Transaction/block classes: caller-supplied or defaults.
    network.Tx = network.tx = kwargs.get("tx") or Tx
    network.Block = network.block = kwargs.get("block") or Block.make_subclass(
        symbol, network.tx)

    # Wire-protocol message parsing/packing.
    streamer = standard_streamer(
        standard_parsing_functions(network.block, network.tx))
    network.message = API()
    network.message.parse, network.message.pack = make_parser_and_packer(
        streamer, standard_messages(), standard_message_post_unpacks(streamer))

    network.output_for_secret_exponent = make_output_for_secret_exponent(
        NetworkKey)
    network.output_for_public_pair = make_output_for_public_pair(
        NetworkKey, network)
    network.keychain = Keychain

    parse_api_class = kwargs.get("parse_api_class", ParseAPI)
    network.parse = parse_api_class(network, **ui_kwargs)
    network.contract = ContractAPI(network, script_tools)
    network.address = make_address_api(network.contract, **ui_kwargs)

    # network.keys: constructors for private/public keys.
    def keys_private(secret_exponent, is_compressed=True):
        return NetworkKey(secret_exponent=secret_exponent, is_compressed=is_compressed)

    def keys_public(item, is_compressed=None):
        if isinstance(item, tuple):
            if is_compressed is None:
                is_compressed = True
            # it's a public pair
            return NetworkKey(public_pair=item, is_compressed=is_compressed)
        if is_compressed is not None:
            raise ValueError("can't set is_compressed from sec")
        return NetworkKey.from_sec(item)

    network.keys = API()
    network.keys.private = keys_private
    network.keys.public = keys_public

    def electrum_seed(seed):
        return NetworkElectrumKey(initial_key=seed)

    def electrum_private(master_private_key):
        return NetworkElectrumKey(master_private_key=master_private_key)

    def electrum_public(master_public_key):
        return NetworkElectrumKey(master_public_key=master_public_key)

    network.keys.bip32_seed = NetworkBIP32Node.from_master_secret
    network.keys.bip32_deserialize = NetworkBIP32Node.deserialize
    network.keys.bip49_deserialize = NetworkBIP49Node.deserialize
    network.keys.bip84_deserialize = NetworkBIP84Node.deserialize
    network.keys.electrum_seed = electrum_seed
    network.keys.electrum_private = electrum_private
    network.keys.electrum_public = electrum_public
    network.keys.InvalidSecretExponentError = InvalidSecretExponentError
    network.keys.InvalidPublicPairError = InvalidPublicPairError

    # network.msg: message signing/verification helpers.
    network.msg = API()
    message_signer = MessageSigner(network, generator)
    network.msg.sign = message_signer.sign_message
    network.msg.verify = message_signer.verify_message
    network.msg.parse_signed = message_signer.parse_signed_message
    network.msg.hash_for_signing = message_signer.hash_for_signing
    network.msg.signature_for_message_hash = message_signer.signature_for_message_hash
    network.msg.pair_for_message_hash = message_signer.pair_for_message_hash

    network.script = script_tools

    network.bip32_as_string = bip32_as_string
    network.bip49_as_string = bip49_as_string
    network.sec_text_for_blob = sec_text_for_blob
    network.wif_for_blob = wif_for_blob

    # NOTE(review): parameter name shadows the builtin `iter`; kept as-is.
    def network_build_hash160_lookup(iter):
        return build_hash160_lookup(iter, [generator])

    # network.tx.solve: lookup builders used when solving scripts.
    network.tx.solve = API()
    network.tx.solve.build_hash160_lookup = network_build_hash160_lookup
    network.tx.solve.build_p2sh_lookup = build_p2sh_lookup
    network.tx.solve.build_sec_lookup = build_sec_lookup

    # network.validator: exceptions and constants used during validation.
    network.validator = API()
    network.validator.ScriptError = ScriptError
    network.validator.ValidationFailureError = ValidationFailureError
    network.validator.errno = errno
    network.validator.flags = flags

    # network.tx_utils: convenience wrappers pre-bound to this network.
    def my_create_tx(*args, **kwargs):
        return create_tx(network, *args, **kwargs)

    def my_sign_tx(*args, **kwargs):
        return sign_tx(network, *args, **kwargs)

    def my_create_signed_tx(*args, **kwargs):
        return create_signed_tx(network, *args, **kwargs)

    def my_split_with_remainder(*args, **kwargs):
        return split_with_remainder(network, *args, **kwargs)

    network.tx_utils = API()
    network.tx_utils.create_tx = my_create_tx
    network.tx_utils.sign_tx = my_sign_tx
    network.tx_utils.create_signed_tx = my_create_signed_tx
    network.tx_utils.split_with_remainder = my_split_with_remainder
    network.tx_utils.distribute_from_split_pool = distribute_from_split_pool

    network.annotate = Annotate(script_tools, network.address)
    network.who_signed = WhoSigned(script_tools, network.address, generator)

    network.str = parseable_str
    network.generator = generator
    return network