def test_fork():
    parent_for_0 = b'\0' * 32
    # 0 <= 1 <= ... <= 5 <= 6
    # 3 <= 301 <= 302 <= 303 <= 304 <= 305
    # parent_for_0 = "motherless"
    BC = BlockChain(parent_for_0)
    ITEMS = dict((i, FakeBlock(i)) for i in range(7))
    ITEMS[0] = FakeBlock(0, parent_for_0)
    ITEMS.update(dict((i, FakeBlock(i)) for i in range(301, 306)))
    ITEMS[301] = FakeBlock(301, 3)

    assert longest_block_chain(BC) == []
    assert BC.locked_length() == 0
    assert BC.length() == 0
    assert set(BC.chain_finder.missing_parents()) == set()

    # send them all except 302
    ops = BC.add_headers(ITEMS[i] for i in ITEMS.keys() if i != 302)
    assert ops == [("add", ITEMS[i], i) for i in range(7)]
    assert set(BC.chain_finder.missing_parents()) == set([parent_for_0, 302])

    # now send 302
    ops = BC.add_headers([ITEMS[302]])
    # we should see a change
    expected = [("remove", ITEMS[i], i) for i in range(6, 3, -1)]
    expected += [("add", ITEMS[i], i + 4 - 301) for i in range(301, 306)]
    assert ops == expected
    assert set(BC.chain_finder.missing_parents()) == set([parent_for_0])

def blockchain(self):
    """Return the current BlockChain object."""
    if self.client:
        return self.client.blockhandler.block_chain
    else:
        blockchain = BlockChain(did_lock_to_index_f=self.bcs.did_lock_to_index)
        blockchain.preload_locked_blocks(self.bcs.headers())
        return blockchain

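# Usage sketch (hedged): `wallet` below is a hypothetical object exposing the
# blockchain() helper above. length() and last_block_hash() are BlockChain
# methods already used elsewhere in these snippets.
def print_chain_tip(wallet):
    block_chain = wallet.blockchain()
    # number of known headers on the longest chain, and the hash of its tip
    print("chain length:", block_chain.length())
    print("chain tip:", block_chain.last_block_hash())
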
def test_callback():
    R = []

    def the_callback(blockchain, ops):
        R.extend(ops)

    parent_for_0 = b'\0' * 32  # same as test_fork, above
    BC = BlockChain(parent_for_0)
    BC.add_change_callback(the_callback)
    ITEMS = dict((i, FakeBlock(i)) for i in range(7))
    ITEMS[0] = FakeBlock(0, parent_for_0)
    ITEMS.update(dict((i, FakeBlock(i)) for i in range(301, 306)))
    ITEMS[301] = FakeBlock(301, 3)

    # send them all except 302
    BC.add_headers(ITEMS[i] for i in ITEMS.keys() if i != 302)
    # now send 302
    BC.add_headers([ITEMS[302]])

    expected = [("add", ITEMS[i], i) for i in range(7)]
    expected += [("remove", ITEMS[i], i) for i in range(6, 3, -1)]
    expected += [("add", ITEMS[i], i + 4 - 301) for i in range(301, 306)]
    assert R == expected

def items_for_client(initial_blocks=[]):
    block_store = {}
    block_chain = BlockChain()
    blockfetcher = Blockfetcher()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    fast_forward_add_peer = fast_forwarder_add_peer_f(block_chain)
    for block in initial_blocks:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(initial_blocks)
    inv_q = inv_collector.new_inv_item_queue()
    ap = make_add_peer(fast_forward_add_peer, blockfetcher, block_handler,
                       inv_collector, block_chain, block_store)
    ap.block_getter_task = asyncio.Task(
        block_getter(inv_q, inv_collector, block_handler, block_chain, block_store))
    return block_handler, block_chain, block_store, ap

def main():
    """
    Retrieve Bitcoin blocks from downloaded block files and print their contents.
    """
    b = BlockChain()
    print(str(b.last_block_hash()))
    bf = Blockfiles(base_dir="E:\\Masterarbeit\\btc_node")
    print(bf._path_for_file_index())
    block = Block.parse_as_header(bf)
    i = -1
    for block in locked_blocks_iterator(start_info=(0, 0), base_dir="E:\\Masterarbeit\\btc_node"):
        i = i + 1
        # if i == 100000:
        print("Block")
        print(block.version)
        print(block.previous_block_hash)
        print(block.merkle_root)
        print(block.timestamp)
        print(block.difficulty)
        print(block.nonce)
        for tx in block.txs:
            # mytx = Tx(tx)
            # print(mytx.txs_in)
            print("Tx")
            print(tx.version)
            print(tx.lock_time)
            print(tx.unspents)
            for tx_in in tx.txs_in:
                print(tx_in.address())
                print("Input")
                # print(str(tx_in.address))
                print(str(tx_in.previous_hash))
                print(str(tx_in.previous_index))
                print(str(tx_in.script))
                print(str(tx_in.sequence))
                print(str(tx_in.witness))
            for tx_out in tx.txs_out:
                print("Output")
                print(tx_out.address())
                print(str(tx_out.coin_value))
                print(str(tx_out.script))

def run_client(peer_list, block_list):
    block_chain = BlockChain()
    block_store = {}
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    for peer in peer_list:
        inv_collector.add_peer(peer)
        block_handler.add_peer(peer)
    for block in block_list:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    while len(block_store) < 2:
        yield from asyncio.sleep(0.1)
    return block_store

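# Hypothetical driver for run_client above (a sketch, not part of the original
# code): peer_list and block_list are assumed to be built elsewhere, e.g. via
# create_handshaked_peers() and make_blocks() as in test_simple_getheader below.
def run_client_until_done(peer_list, block_list):
    loop = asyncio.get_event_loop()
    # run_client is a generator-based coroutine, so the loop can drive it directly
    block_store = loop.run_until_complete(run_client(peer_list, block_list))
    return block_store
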
def test_large():
    SIZE = 3000
    ITEMS = [FakeBlock(i) for i in range(SIZE)]
    ITEMS[0] = FakeBlock(0, parent_for_0)
    BC = BlockChain(parent_for_0)
    assert longest_block_chain(BC) == []
    assert BC.locked_length() == 0
    assert BC.length() == 0
    assert set(BC.chain_finder.missing_parents()) == set()

    ops = BC.add_headers(ITEMS)
    assert ops == [("add", ITEMS[i], i) for i in range(SIZE)]
    assert longest_block_chain(BC) == list(range(SIZE))
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0}
    assert BC.parent_hash == parent_for_0
    assert BC.locked_length() == 0
    assert BC.length() == SIZE
    for i in range(SIZE):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

def locked_blocks_iterator(blockfile, start_info=(0, 0), cached_headers=50, batch_size=50):
    """
    This method loads blocks from disk, skipping any orphan blocks.
    """
    f = blockfile
    current_state = []

    def change_state(bc, ops):
        for op, bh, work in ops:
            if op == 'add':
                current_state.append(bh)
            else:
                current_state.pop()

    bc = BlockChain()
    bc.add_change_callback(change_state)
    bhs = []
    index = 0
    info_offset = start_info
    while 1:
        v = blockfile.next_offset(info_offset)
        if v is None:
            break
        block_offset, info_offset = v
        f.jump_to(block_offset)
        bh = Block.parse_as_header(f)
        bh.info = block_offset
        bhs.append(bh)
        if len(bhs) > batch_size:
            bc.add_headers(bhs)
            bhs = []
            if len(current_state) > cached_headers:
                for bh in current_state[:cached_headers]:
                    bh.index = index
                    yield bh
                    index += 1
                bc.lock_to_index(index)
                current_state = current_state[cached_headers:]

def locked_blocks_iterator(start_info=(0, 0), cached_headers=50, batch_size=50,
                           base_dir=None, headers_only=False):
    """
    This method loads blocks from disk, skipping any orphan blocks.
    """
    block_class = BlockHeader if headers_only else Block
    f = Blockfiles(base_dir, start_info)
    for initial_location in block_info_iterator(start_info, base_dir):
        f.jump_to(initial_location)
        initial_header = BlockHeader.parse(f)
        break
    current_state = []

    def change_state(bc, ops):
        for op, bh, work in ops:
            if op == 'add':
                current_state.append(bh)
            else:
                current_state.pop()

    bc = BlockChain()
    bc.add_change_callback(change_state)
    bhs = []
    index = 0
    for info in block_info_iterator(start_info, base_dir):
        bh = blockheader_for_offset_info(info, base_dir)
        bh.info = info
        bhs.append(bh)
        if len(bhs) > batch_size:
            bc.add_headers(bhs)
            bhs = []
            if len(current_state) > cached_headers:
                for bh in current_state[:cached_headers]:
                    f.jump_to(bh.info)
                    block = block_class.parse(f)
                    yield block
                    index += 1
                bc.lock_to_index(index)
                current_state = current_state[cached_headers:]

def test_simple_getheader():
    BLOCKS = make_blocks(20)
    blockchain1 = BlockChain()
    blockchain1.add_headers(BLOCKS)
    block_store = dict((b.hash(), b) for b in BLOCKS)

    peer1, peer2 = create_handshaked_peers()

    block_store = {}
    block_chain = BlockChain()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    for block in BLOCKS:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(BLOCKS)
    inv_collector.add_peer(peer1)
    block_handler.add_peer(peer1)

    @asyncio.coroutine
    def run_peer2():
        r = []
        headers = yield from standards.get_headers_hashes(peer2, until_block_hash=b'\0' * 32)
        r.append(headers)
        return r

    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f2]))

    r = f2.result()
    assert len(r) == 1
    assert [b.hash() for b in r[0]] == [b.hash() for b in BLOCKS]

def locked_blocks_iterator(start_info=(0, 0), cached_headers=50, batch_size=50,
                           base_dir=None, headers_only=False):
    """
    This method loads blocks from disk, skipping any orphan blocks.
    """
    parse_method = Block.parse_as_header if headers_only else Block.parse
    f = Blockfiles(base_dir, start_info)
    for initial_location in block_info_iterator(start_info, base_dir):
        f.jump_to(initial_location)
        parse_method(f)
        break
    current_state = []

    def change_state(bc, ops):
        for op, bh, work in ops:
            if op == 'add':
                current_state.append(bh)
            else:
                current_state.pop()

    bc = BlockChain()
    bc.add_change_callback(change_state)
    bhs = []
    index = 0
    for info in block_info_iterator(start_info, base_dir):
        bh = blockheader_for_offset_info(info, base_dir)
        bh.info = info
        bhs.append(bh)
        if len(bhs) > batch_size:
            bc.add_headers(bhs)
            bhs = []
            if len(current_state) > cached_headers:
                for bh in current_state[:cached_headers]:
                    f.jump_to(bh.info)
                    block = parse_method(f)
                    yield block
                    index += 1
                bc.lock_to_index(index)
                current_state = current_state[cached_headers:]

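# Usage sketch for locked_blocks_iterator above (hedged: base_dir is a
# placeholder for a directory of downloaded block files). With headers_only=True
# each yielded object is parsed via Block.parse_as_header; previous_block_hash
# and timestamp are the same header fields printed in main() above.
def print_header_summaries(base_dir):
    for block in locked_blocks_iterator(start_info=(0, 0), base_dir=base_dir,
                                        headers_only=True):
        print(block.previous_block_hash, block.timestamp)
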
def test_basic():
    BC = BlockChain(parent_for_0)
    ITEMS = [FakeBlock(i) for i in range(100)]
    ITEMS[0] = FakeBlock(0, parent_for_0)

    assert longest_block_chain(BC) == []
    assert BC.length() == 0
    assert BC.locked_length() == 0
    assert set(BC.chain_finder.missing_parents()) == set()
    assert BC.parent_hash == parent_for_0
    assert BC.index_for_hash(0) is None
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[:5])
    assert ops == [("add", ITEMS[i], i) for i in range(5)]
    assert BC.parent_hash == parent_for_0
    assert longest_block_chain(BC) == list(range(5))
    assert BC.length() == 5
    assert BC.locked_length() == 0
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0}
    for i in range(5):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[:7])
    assert ops == [("add", ITEMS[i], i) for i in range(5, 7)]
    assert BC.parent_hash == parent_for_0
    assert longest_block_chain(BC) == list(range(7))
    assert BC.length() == 7
    assert BC.locked_length() == 0
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0}
    for i in range(7):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[10:14])
    assert ops == []
    assert BC.parent_hash == parent_for_0
    assert longest_block_chain(BC) == [0, 1, 2, 3, 4, 5, 6]
    assert BC.locked_length() == 0
    assert BC.length() == 7
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0, 9}
    for i in range(7):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[7:10])
    assert ops == [("add", ITEMS[i], i) for i in range(7, 14)]
    assert longest_block_chain(BC) == list(range(14))
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0}
    assert BC.parent_hash == parent_for_0
    assert BC.locked_length() == 0
    assert BC.length() == 14
    for i in range(14):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[90:])
    assert ops == []
    assert longest_block_chain(BC) == list(range(14))
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0, 89}
    assert BC.parent_hash == parent_for_0
    assert BC.locked_length() == 0
    assert BC.length() == 14
    for i in range(14):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

    ops = BC.add_headers(ITEMS[14:90])
    assert ops == [("add", ITEMS[i], i) for i in range(14, 100)]
    assert longest_block_chain(BC) == list(range(100))
    assert set(BC.chain_finder.missing_parents()) == {parent_for_0}
    assert BC.parent_hash == parent_for_0
    assert BC.locked_length() == 0
    assert BC.length() == 100
    for i in range(100):
        v = BC.tuple_for_index(i)
        assert v[0] == i
        assert v[1] == parent_for_0 if i == 0 else i
    assert BC.index_for_hash(-1) is None

def test_chain_locking():
    SIZE = 2000
    COUNT = 200
    ITEMS = [FakeBlock(i, i - 1) for i in range(SIZE * COUNT)]
    ITEMS[0] = FakeBlock(0, parent_for_0)
    BC = BlockChain(parent_for_0)
    assert longest_block_chain(BC) == []
    assert BC.locked_length() == 0
    assert BC.length() == 0
    assert set(BC.chain_finder.missing_parents()) == set()
    for i in range(COUNT):
        start, end = i * SIZE, (i + 1) * SIZE
        lock_start = max(0, start - 10)
        expected_parent = lock_start - 1 if lock_start else parent_for_0
        assert BC.length() == start
        assert BC.locked_length() == lock_start
        ops = BC.add_headers(ITEMS[start:end])
        assert ops == [("add", ITEMS[i], i) for i in range(start, end)]
        assert longest_locked_block_chain(BC) == list(range(lock_start, end))
        assert set(BC.chain_finder.missing_parents()) == {expected_parent}
        assert BC.parent_hash == expected_parent
        assert BC.locked_length() == lock_start
        assert BC.length() == end
        for i in range(start, end):
            v = BC.tuple_for_index(i)
            assert v[0] == i
            assert v[1] == parent_for_0 if i == 0 else i
        assert BC.index_for_hash(-1) is None
        assert BC.locked_length() == max(0, lock_start)
        BC.lock_to_index(end - 10)
        assert BC.locked_length() == end - 10

def __init__(self, network, host_port_q, should_download_block_f,
             block_chain_store, blockchain_change_callback, server_port=9999):
    """
    network: a value from pycoinnet.helpers.networks
    host_port_q: a Queue that is being fed potential places to connect
    should_download_block_f: a function accepting (block_hash, block_index) and
        returning a boolean indicating whether that block should be downloaded.
        Only used during fast-forward.
    block_chain_store: usually a BlockChainStore instance
    blockchain_change_callback: a callback that expects (blockchain, list_of_ops)
        and is invoked whenever the block chain is updated; blockchain is a
        BlockChain object and list_of_ops is a list of tuples of the form
        (op, block_hash, block_index) where op is one of "add" or "remove",
        block_hash is a binary block hash, and block_index is an integer index
        number.
    """
    block_chain = BlockChain(did_lock_to_index_f=block_chain_store.did_lock_to_index)
    block_chain.preload_locked_blocks(block_chain_store.headers())
    block_chain.add_change_callback(block_chain_locker_callback)

    self.blockfetcher = Blockfetcher()
    self.inv_collector = InvCollector()
    self.block_store = TwoLevelDict()

    @asyncio.coroutine
    def _rotate(block_store):
        while True:
            block_store.rotate()
            yield from asyncio.sleep(1800)

    self.rotate_task = asyncio.Task(_rotate(self.block_store))

    self.blockhandler = BlockHandler(self.inv_collector, block_chain, self.block_store,
                                     should_download_f=should_download_block_f)
    block_chain.add_change_callback(blockchain_change_callback)
    self.fast_forward_add_peer = fast_forwarder_add_peer_f(block_chain)
    self.fetcher_task = asyncio.Task(new_block_fetcher(self.inv_collector, block_chain))

    self.nonce = int.from_bytes(os.urandom(8), byteorder="big")
    self.subversion = "/Notoshi/".encode("utf8")

    @asyncio.coroutine
    def run_peer(peer, fetcher, fast_forward_add_peer, blockfetcher, inv_collector, blockhandler):
        yield from asyncio.wait_for(peer.connection_made_future, timeout=None)
        version_parameters = version_data_for_peer(
            peer, local_port=(server_port or 0), last_block_index=block_chain.length(),
            nonce=self.nonce, subversion=self.subversion)
        version_data = yield from initial_handshake(peer, version_parameters)
        last_block_index = version_data["last_block_index"]
        fast_forward_add_peer(peer, last_block_index)
        blockfetcher.add_peer(peer, fetcher, last_block_index)
        inv_collector.add_peer(peer)
        blockhandler.add_peer(peer)

    def create_protocol_callback():
        peer = BitcoinPeerProtocol(network["MAGIC_HEADER"])
        install_pingpong_manager(peer)
        fetcher = Fetcher(peer)
        peer.add_task(run_peer(
            peer, fetcher, self.fast_forward_add_peer, self.blockfetcher,
            self.inv_collector, self.blockhandler))
        return peer

    self.connection_info_q = manage_connection_count(host_port_q, create_protocol_callback, 8)
    self.show_task = asyncio.Task(show_connection_info(self.connection_info_q))

    # listener
    @asyncio.coroutine
    def run_listener():
        abstract_server = None
        try:
            abstract_server = yield from asyncio.get_event_loop().create_server(
                protocol_factory=create_protocol_callback, port=server_port)
            return abstract_server
        except OSError:
            logging.info("can't listen on port %d", server_port)

    if server_port:
        self.server_task = asyncio.Task(run_listener())

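# A minimal blockchain_change_callback sketch (hypothetical example, not part of
# the original code), assuming only the (op, ..., block_index) tuple shape
# described in the docstring above. Such a function could be passed as
# blockchain_change_callback to __init__.
def log_chain_changes(blockchain, list_of_ops):
    for op, block, block_index in list_of_ops:
        if op == "add":
            logging.info("block added at index %d", block_index)
        else:  # "remove" happens when a reorg unwinds part of the chain
            logging.info("block removed from index %d", block_index)
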