def run_peer1():
    # Script one side of the fetcher test: answer the first getdata with a
    # "tx", the second with a "notfound", the third with a "tx", and the
    # fourth (a batch covering txs 3..5) with a two-item "notfound" plus tx 4.
    # Returns the list of messages received from the remote side.
    # NOTE: relies on peer1 / TX_LIST / VERSION_MSG from the enclosing scope.
    r = []
    yield from standards.initial_handshake(peer1, VERSION_MSG)
    next_message = peer1.new_get_next_message_f()
    t = yield from next_message()
    r.append(t)
    peer1.send_msg("tx", tx=TX_LIST[0])
    t = yield from next_message()
    r.append(t)
    peer1.send_msg("notfound", items=[InvItem(ITEM_TYPE_TX, TX_LIST[1].hash())])
    t = yield from next_message()
    r.append(t)
    peer1.send_msg("tx", tx=TX_LIST[2])
    t = yield from next_message()
    r.append(t)
    # BUG FIX: the original did items.append([InvItem(...)]), nesting a list
    # inside the items list. The "notfound" message needs a flat list of
    # InvItems so that both tx 3 and tx 5 are reported as not found.
    items = [InvItem(ITEM_TYPE_TX, TX_LIST[3].hash()),
             InvItem(ITEM_TYPE_TX, TX_LIST[5].hash())]
    peer1.send_msg("notfound", items=items)
    peer1.send_msg("tx", tx=TX_LIST[4])
    return r
def run_peer_2(peer, txs):
    # Advertise every transaction in `txs`, but keep only txs[5:] in the
    # local store: getdata requests for the first five are answered with
    # "notfound", while the rest are served as "tx" messages.
    # NOTE(review): an earlier comment mentioned a 0.25 s sleep and a later
    # re-advertisement that this code does not actually perform.
    get_msg = peer.new_get_next_message_f()
    available = {tx.hash(): tx for tx in txs[5:]}
    seen = []
    peer.send_msg("inv", items=[InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs])
    while True:
        msg = yield from get_msg()
        seen.append(msg)
        if msg[0] == 'getdata':
            hits = []
            misses = []
            for inv_item in msg[1]["items"]:
                tx = available.get(inv_item.data)
                if tx is None:
                    misses.append(inv_item)
                else:
                    hits.append(tx)
            if misses:
                peer.send_msg("notfound", items=misses)
            for tx in hits:
                peer.send_msg("tx", tx=tx)
    # Unreachable in practice (the loop only exits via an exception from
    # get_msg); kept for parity with sibling helpers.
    return seen
def run_remote_peer(peer, txs, in_db_count, delay):
    # After `delay` seconds, advertise every tx in `txs` while holding only
    # the first `in_db_count` of them.  On getdata, serve what is held and
    # reply "notfound" for the rest -- except that a notfound batch of
    # exactly 9 items is trimmed to 8 (one item is deliberately dropped).
    yield from asyncio.sleep(delay)
    held = {tx.hash(): tx for tx in txs[:in_db_count]}
    log = []
    peer.send_msg("inv", items=[InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs])
    get_msg = peer.new_get_next_message_f()
    while True:
        msg = yield from get_msg()
        log.append(msg)
        if msg[0] == 'getdata':
            yield from asyncio.sleep(0.1)
            hits = []
            misses = []
            for inv_item in msg[1]["items"]:
                if inv_item.data in held:
                    hits.append(held[inv_item.data])
                else:
                    misses.append(inv_item)
            if misses:
                if len(misses) == 9:
                    misses = misses[:8]
                peer.send_msg("notfound", items=misses)
            for tx in hits:
                peer.send_msg("tx", tx=tx)
    # Unreachable: the loop only exits via an exception from get_msg().
    return log
def test_fetcher_timeout():
    # A Fetcher.fetch() with timeout=2 against a peer that sends nothing
    # after the getdata must resolve to None.
    peer1, peer2 = create_peers()
    TX_LIST = [make_tx(i) for i in range(100)]

    @asyncio.coroutine
    def run_peer1():
        collected = []
        yield from standards.initial_handshake(peer1, VERSION_MSG)
        next_message = peer1.new_get_next_message_f()
        msg = yield from next_message()
        collected.append(msg)
        return collected

    @asyncio.coroutine
    def run_peer2():
        collected = []
        yield from standards.initial_handshake(peer2, VERSION_MSG_2)
        tx_fetcher = Fetcher(peer2)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[0].hash()), timeout=2)
        collected.append(tx)
        return collected

    task1 = asyncio.Task(run_peer1())
    task2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([task1, task2]))

    r1 = task1.result()
    assert len(r1) == 1
    expected = ('getdata', dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[0].hash()),)))
    assert r1[0] == expected

    r2 = task2.result()
    assert len(r2) == 1
    assert r2[0] is None
def _fetch_loop(self, next_message, getdata_loop_future):
    """
    Deliver incoming "tx"/"block" messages to the futures registered in
    self.futures, and resolve futures to None on "notfound".  Runs until
    the peer disconnects (EOFError), then cancels the getdata loop.
    """
    try:
        while True:
            name, data = yield from next_message()
            if name in ["tx", "block"]:
                item = data[name]
                the_hash = item.hash()
                inv_item = InvItem(
                    ITEM_TYPE_TX if name == 'tx' else ITEM_TYPE_BLOCK, the_hash)
                future = self.futures.get(inv_item)
                if future:
                    del self.futures[inv_item]
                    if not future.done():
                        future.set_result(item)
                else:
                    logging.info("got %s unsolicited", item.id())
            if name == "notfound":
                for inv_item in data["items"]:
                    future = self.futures.get(inv_item)
                    if future:
                        del self.futures[inv_item]
                        # BUG FIX: guard with done() (as the tx/block branch
                        # does) -- set_result on a cancelled/completed future
                        # raises InvalidStateError and would kill this loop.
                        if not future.done():
                            future.set_result(None)
    except EOFError:
        getdata_loop_future.cancel()
def _fetch_missing(peer, header):
    # Request the parent of `header` from `peer` and wait for the matching
    # "block" message.  The filtered message handler is registered before
    # getdata is sent so the reply cannot be missed.
    get_block_msg = peer.new_get_next_message_f(
        lambda msg, data: msg == 'block')
    want_hash = header.previous_block_hash
    peer.send_msg("getdata", items=[InvItem(ITEM_TYPE_BLOCK, want_hash)])
    _, data = yield from get_block_msg()
    return data["block"]
def add_tx(self, tx):
    """
    Add a transaction to the mempool and advertise it to peers so it can
    propagate throughout the network.  Transactions already present in
    the store are ignored.
    """
    the_hash = tx.hash()
    if the_hash in self.tx_store:
        return
    self.tx_store[the_hash] = tx
    self.inv_collector.advertise_item(InvItem(ITEM_TYPE_TX, the_hash))
def add_block(self, block):
    """
    Add a block and advertise it to peers so it can propagate throughout
    the network.  Blocks already present in the store are ignored.
    """
    the_hash = block.hash()
    if the_hash in self.block_store:
        return
    self.block_store[the_hash] = block
    self.inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, the_hash))
def _run_mempool(next_message):
    # Answer a single "mempool" request with an "inv" listing every
    # transaction currently held, then finish -- the message only needs
    # handling once per peer.  A disconnect while waiting is ignored.
    try:
        name, data = yield from next_message()
        inv_items = [InvItem(ITEM_TYPE_TX, tx.hash())
                     for tx in self.tx_store.values()]
        logging.debug("sending inv of %d item(s) in response to mempool",
                      len(inv_items))
        if inv_items:
            peer.send_msg("inv", items=inv_items)
    except EOFError:
        pass
def _fetch_loop(self, next_message, getdata_loop_future):
    """
    Deliver incoming "tx"/"block"/"merkleblock" items to the futures
    registered in self.futures, and resolve futures to None on "notfound".
    For a merkleblock, the following "tx" messages (one per hash in
    data["tx_hashes"]) are collected onto item.txs before delivery.
    Runs until the peer disconnects (EOFError), then cancels the getdata
    loop.
    """
    ITEM_LOOKUP = dict(tx="tx", block="block", merkleblock="header")
    TYPE_DB = {
        "tx": ITEM_TYPE_TX,
        "block": ITEM_TYPE_BLOCK,
        "merkleblock": ITEM_TYPE_MERKLEBLOCK
    }
    try:
        while True:
            name, data = yield from next_message()
            if name in ITEM_LOOKUP:
                item = data[ITEM_LOOKUP[name]]
                the_hash = item.hash()
                inv_item = InvItem(TYPE_DB[name], the_hash)
                # BUG FIX: look the future up *before* consuming follow-up
                # messages -- the original referenced `future` in the
                # merkleblock error paths before it was ever assigned
                # (unbound local, or a stale future from a prior iteration).
                future = self.futures.get(inv_item)
                if name == "merkleblock":
                    txs = []
                    failed = False
                    for h in data["tx_hashes"]:
                        name, data = yield from next_message()
                        if name != "tx":
                            logging.error(
                                "insufficient tx messages after merkleblock message: missing %s",
                                b2h_rev(h))
                            failed = True
                            break
                        tx = data["tx"]
                        if tx.hash() != h:
                            logging.error(
                                "missing tx message after merkleblock message: missing %s",
                                b2h_rev(h))
                            failed = True
                            break
                        txs.append(tx)
                    if failed:
                        # Resolve the waiter (if any) with None and skip the
                        # normal delivery below; the original fell through
                        # after deleting the future, double-handling it and
                        # logging a spurious "unsolicited" message.
                        if future is not None:
                            del self.futures[inv_item]
                            if not future.done():
                                future.set_result(None)
                        continue
                    item.txs = txs
                if future is not None:
                    del self.futures[inv_item]
                    if not future.done():
                        future.set_result(item)
                else:
                    logging.info("got %s unsolicited", item.id())
            if name == "notfound":
                for inv_item in data["items"]:
                    future = self.futures.get(inv_item)
                    if future is not None:
                        del self.futures[inv_item]
                        # BUG FIX: guard with done() -- set_result on a
                        # cancelled future raises InvalidStateError.
                        if not future.done():
                            future.set_result(None)
    except EOFError:
        getdata_loop_future.cancel()
def _fetch_missing(peer, blockchain):
    # Request missing parent blocks from `peer` one at a time, feeding each
    # received block into the blockchain; stop as soon as adding one yields
    # a non-empty ops list (or when there are no more missing parents).
    get_block_msg = peer.new_get_next_message_f(
        lambda msg, data: msg == 'block')
    ops = []
    for needed_hash in blockchain.chain_finder.missing_parents():
        peer.send_msg("getdata", items=[InvItem(ITEM_TYPE_BLOCK, needed_hash)])
        _, data = yield from get_block_msg()
        ops = blockchain.add_headers([data["block"]])
        if ops:
            break
    return ops
def run_remote_peer(next_message, peer, txs):
    # Advertise every tx in `txs` and serve each requested one with a "tx"
    # message; every incoming message is recorded.
    store = {tx.hash(): tx for tx in txs}
    history = []
    peer.send_msg("inv", items=[InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs])
    while True:
        msg = yield from next_message()
        history.append(msg)
        if msg[0] == 'getdata':
            for inv_item in msg[1]["items"]:
                peer.send_msg("tx", tx=store[inv_item.data])
    # Unreachable: the loop only exits via an exception from next_message().
    return history
def run_client(peer_list, block_list):
    # Wire a BlockHandler + InvCollector to every peer, pre-advertise the
    # given blocks, then poll until at least two blocks are in the store.
    chain = BlockChain()
    store = {}
    collector = InvCollector()
    handler = BlockHandler(collector, chain, store)
    for peer in peer_list:
        collector.add_peer(peer)
        handler.add_peer(peer)
    for block in block_list:
        collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        store[block.hash()] = block
    while len(store) < 2:
        yield from asyncio.sleep(0.1)
    return store
def items_for_client(initial_blocks=None):
    """
    Assemble the client-side machinery: block store, block chain, block
    fetcher, inv collector, block handler, and an add_peer callable with a
    running block-getter task attached.

    initial_blocks: optional list of blocks to preload into the store and
        advertise (defaults to no blocks).

    Returns (block_handler, block_chain, block_store, add_peer).
    """
    # BUG FIX: the original used a mutable default argument
    # (initial_blocks=[]), which is shared across calls.
    if initial_blocks is None:
        initial_blocks = []
    block_store = {}
    block_chain = BlockChain()
    blockfetcher = Blockfetcher()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    fast_forward_add_peer = fast_forwarder_add_peer_f(block_chain)
    for block in initial_blocks:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(initial_blocks)
    inv_q = inv_collector.new_inv_item_queue()
    ap = make_add_peer(fast_forward_add_peer, blockfetcher, block_handler,
                       inv_collector, block_chain, block_store)
    ap.block_getter_task = asyncio.Task(
        block_getter(inv_q, inv_collector, block_handler, block_chain,
                     block_store))
    return block_handler, block_chain, block_store, ap
def test_simple_getheader():
    # A getheaders request starting from the zero hash must return all 20
    # advertised blocks in order.
    # BUG FIX (dead code): the original built a `blockchain1` and an initial
    # block_store dict that were never used / immediately overwritten;
    # both removed.
    BLOCKS = make_blocks(20)
    peer1, peer2 = create_handshaked_peers()
    block_store = {}
    block_chain = BlockChain()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    for block in BLOCKS:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(BLOCKS)
    inv_collector.add_peer(peer1)
    block_handler.add_peer(peer1)

    @asyncio.coroutine
    def run_peer2():
        r = []
        headers = yield from standards.get_headers_hashes(
            peer2, after_block_hash=b'\0' * 32)
        r.append(headers)
        return r

    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f2]))
    r = f2.result()
    assert len(r) == 1
    assert [b.hash() for b in r[0]] == [b.hash() for b in BLOCKS]
def get_block_future(self, block_hash, block_index):
    # Queue a prioritized (by block_index) request for the given block and
    # hand back a future that will resolve once the block is fetched.
    result_future = asyncio.Future()
    entry = (block_index, InvItem(ITEM_TYPE_BLOCK, block_hash), result_future)
    self.block_hash_priority_queue.put_nowait(entry)
    return result_future
def test_fetcher():
    # End-to-end Fetcher test: peer1 plays a scripted remote that answers
    # getdata requests with a mix of "tx" and "notfound"; peer2 runs the
    # Fetcher and must see found txs and None for not-found ones.
    peer1, peer2 = create_peers()
    TX_LIST = [make_tx(i) for i in range(100)]
    from pycoinnet.message import pack_from_data
    # Sanity-pack a few message shapes first to ensure they serialize.
    msg_data = pack_from_data("tx", tx=TX_LIST[0])
    item1 = [InvItem(1, TX_LIST[0].hash())]
    item2 = [InvItem(1, TX_LIST[i].hash()) for i in range(2)]
    msg_data = pack_from_data("getdata", items=item1)
    msg_data = pack_from_data("getdata", items=item2)
    msg_data = pack_from_data("notfound", items=item1)
    msg_data = pack_from_data("notfound", items=item2)

    @asyncio.coroutine
    def run_peer1():
        r = []
        yield from standards.initial_handshake(peer1, VERSION_MSG)
        next_message = peer1.new_get_next_message_f()
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("tx", tx=TX_LIST[0])
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("notfound", items=[InvItem(ITEM_TYPE_TX, TX_LIST[1].hash())])
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("tx", tx=TX_LIST[2])
        t = yield from next_message()
        r.append(t)
        # BUG FIX: the original did items.append([InvItem(...)]), nesting a
        # list inside the items list. The "notfound" message needs a flat
        # list of InvItems so both tx 3 and tx 5 resolve to None below.
        items = [InvItem(ITEM_TYPE_TX, TX_LIST[3].hash()),
                 InvItem(ITEM_TYPE_TX, TX_LIST[5].hash())]
        peer1.send_msg("notfound", items=items)
        peer1.send_msg("tx", tx=TX_LIST[4])
        return r

    @asyncio.coroutine
    def run_peer2():
        r = []
        yield from standards.initial_handshake(peer2, VERSION_MSG_2)
        tx_fetcher = Fetcher(peer2)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[0].hash()), timeout=5)
        r.append(tx)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[1].hash()))
        r.append(tx)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[2].hash()))
        r.append(tx)
        f1 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[3].hash())))
        f2 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[4].hash())))
        f3 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[5].hash())))
        yield from asyncio.wait([f1, f2, f3])
        r.append(f1.result())
        r.append(f2.result())
        r.append(f3.result())
        return r

    f1 = asyncio.Task(run_peer1())
    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f1, f2]))

    r = f1.result()
    assert len(r) == 4
    assert r[0] == ('getdata', dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[0].hash()),)))
    assert r[1] == ('getdata', dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[1].hash()),)))
    assert r[2] == ('getdata', dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[2].hash()),)))
    assert r[3] == ('getdata', dict(
        items=tuple(InvItem(ITEM_TYPE_TX, TX_LIST[i].hash()) for i in range(3, 6))))

    r = f2.result()
    assert len(r) == 6
    assert r[0].hash() == TX_LIST[0].hash()
    assert r[1] == None
    assert r[2].hash() == TX_LIST[2].hash()
    assert r[3] == None
    assert r[4].hash() == TX_LIST[4].hash()
    assert r[5] == None
def mi(the_hash):
    # Shorthand: wrap a transaction hash in an InvItem of tx type.
    return InvItem(ITEM_TYPE_TX, the_hash)
def _download_block(block_hash, block_index):
    # Fetch a single block through the inv collector; block_index is part
    # of the callback signature but unused here.
    inv_item = InvItem(ITEM_TYPE_BLOCK, block_hash)
    block = yield from self.inv_collector.fetch(inv_item)
    return block