def test_fetcher_timeout():
    """A Fetcher.fetch with a timeout returns None when the remote never replies.

    peer1 (the "remote") reads the getdata request but deliberately sends no
    response; peer2's fetch should give up after 2 seconds and yield None.
    """
    peer1, peer2 = create_peers()
    TX_LIST = [make_tx(i) for i in range(100)]

    @asyncio.coroutine
    def run_peer1():
        # Remote side: handshake, record the one incoming message, answer nothing.
        r = []
        yield from standards.initial_handshake(peer1, VERSION_MSG)
        next_message = peer1.new_get_next_message_f()
        t = yield from next_message()
        r.append(t)
        return r

    @asyncio.coroutine
    def run_peer2():
        # Local side: fetch one tx with a 2 s timeout; expect it to expire.
        r = []
        yield from standards.initial_handshake(peer2, VERSION_MSG_2)
        tx_fetcher = Fetcher(peer2)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[0].hash()), timeout=2)
        r.append(tx)
        return r

    f1 = asyncio.Task(run_peer1())
    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f1, f2]))
    r = f1.result()
    assert len(r) == 1
    # The remote must have seen exactly one getdata for the requested tx.
    assert r[0] == ('getdata', dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[0].hash()),)))
    r = f2.result()
    assert len(r) == 1
    # idiom fix: identity comparison for None (was `== None`)
    assert r[0] is None
def test_TxHandler_simple():
    """Three clients gossip 20 txs via TxHandler until everyone holds all of them."""
    # create some peers
    peer1_2, peer2_1 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.2")
    peer1_3, peer3_1 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.3")
    TX_LIST = [make_tx(i) for i in range(20)]

    @asyncio.coroutine
    def run_client(peers, initial_txs):
        # Each client seeds its handler with some txs, wires up its peers,
        # then polls until the shared store holds all 20 transactions.
        store = {}
        collector = InvCollector()
        handler = TxHandler(collector, store)
        for a_tx in initial_txs:
            handler.add_tx(a_tx)
        for a_peer in peers:
            collector.add_peer(a_peer)
            handler.add_peer(a_peer)
        while len(store) < 20:
            yield from asyncio.sleep(0.1)
        return store

    tasks = [
        asyncio.Task(run_client([peer1_2, peer1_3], [])),
        asyncio.Task(run_client([peer2_1], TX_LIST[:10])),
        asyncio.Task(run_client([peer3_1], TX_LIST[10:])),
    ]
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait(tasks, timeout=5.0))
    assert len(done) == 3
    assert len(pending) == 0
    expected_hashes = set(a_tx.hash() for a_tx in TX_LIST)
    for finished in list(done):
        store = finished.result()
        assert len(store) == 20
        assert set(a_tx.hash() for a_tx in store.values()) == expected_hashes
def test_InvCollector():
    """A local peer collects 90 txs advertised in thirds by three remote peers.

    Each remote peer advertises 30 distinct txs and serves them on getdata;
    the local InvCollector must fetch all 90 within the 5 s budget.
    """
    # create some peers; peer1_* represents the local peer
    peer1_2, peer2 = create_handshaked_peers()
    peer1_3, peer3 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.3")
    peer1_4, peer4 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.4")
    TX_LIST = [make_tx(i) for i in range(100)]

    @asyncio.coroutine
    def run_local_peer(inv_collector, inv_item_q):
        # Drain the inv item queue, fetching each advertised item until 90 arrive.
        r = []
        while len(r) < 90:
            inv_item = yield from inv_item_q.get()
            v = yield from inv_collector.fetch(inv_item)
            r.append(v)
        return r

    @asyncio.coroutine
    def run_remote_peer(next_message, peer, txs):
        # Advertise all txs, then serve each one requested via getdata.
        tx_db = dict((tx.hash(), tx) for tx in txs)
        r = []
        inv_items = [InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs]
        peer.send_msg("inv", items=inv_items)
        while True:
            t = yield from next_message()
            r.append(t)
            if t[0] == 'getdata':
                for inv_item in t[1]["items"]:
                    peer.send_msg("tx", tx=tx_db[inv_item.data])
        return r

    futures = []
    for peer, txs in [(peer2, TX_LIST[:30]), (peer3, TX_LIST[30:60]), (peer4, TX_LIST[60:90])]:
        f = asyncio.Task(
            run_remote_peer(peer.new_get_next_message_f(), peer, txs))
        futures.append(f)
    inv_collector = InvCollector()
    # plain loop (was a side-effect-only list comprehension)
    for peer in [peer1_2, peer1_3, peer1_4]:
        inv_collector.add_peer(peer)
    f = asyncio.Task(
        run_local_peer(inv_collector, inv_collector.new_inv_item_queue()))
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait([f], timeout=5.0))
    # guard: a timeout used to surface as an opaque KeyError from done.pop()
    assert len(done) == 1
    r = done.pop().result()
    assert len(r) == 90
    assert [tx.hash() for tx in r] == [tx.hash() for tx in TX_LIST[:90]]
def test_InvCollector_simple():
    """One local peer pulls ten advertised txs from a single remote peer."""
    # create some peers; peer1_2 represents the local peer
    peer1_2, peer2 = create_handshaked_peers()
    TX_LIST = [make_tx(i) for i in range(10)]

    @asyncio.coroutine
    def run_local_peer(peers):
        # Register the peers, then fetch every item the collector announces.
        collector = InvCollector()
        for a_peer in peers:
            collector.add_peer(a_peer)
        fetched = []
        queue = collector.new_inv_item_queue()
        while len(fetched) < 10:
            item = yield from queue.get()
            obj = yield from collector.fetch(item)
            fetched.append(obj)
        return fetched

    @asyncio.coroutine
    def run_remote_peer(next_message, peer, txs):
        # Advertise every tx up front, then serve whatever getdata requests.
        tx_db = dict((a_tx.hash(), a_tx) for a_tx in txs)
        seen = []
        peer.send_msg("inv", items=[InvItem(ITEM_TYPE_TX, a_tx.hash()) for a_tx in txs])
        while True:
            msg = yield from next_message()
            seen.append(msg)
            if msg[0] == 'getdata':
                for item in msg[1]["items"]:
                    peer.send_msg("tx", tx=tx_db[item.data])
        return seen

    f1 = asyncio.Task(run_local_peer([peer1_2]))
    f2 = asyncio.Task(
        run_remote_peer(peer2.new_get_next_message_f(), peer2, TX_LIST))
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait([f1], timeout=3.0))
    r = done.pop().result()
    assert len(r) == 10
    assert [a_tx.hash() for a_tx in r] == [a_tx.hash() for a_tx in TX_LIST]
def __init__(self, inv_collector, block_chain, block_store,
             should_download_f=lambda block_hash, block_index: True,
             block_validator=lambda block: True):
    """Wire the handler to its collaborators and start watching for blocks.

    inv_collector: source of inv item announcements (queue obtained here)
    block_chain / block_store: where accepted blocks are recorded
    should_download_f: predicate deciding whether a block is worth fetching
    block_validator: predicate applied to fetched blocks by the watcher task
    """
    self.inv_collector, self.block_chain, self.block_store = (
        inv_collector, block_chain, block_store)
    self.q = inv_collector.new_inv_item_queue()
    # background task that processes items from the collector's queue
    self._watch_invcollector_task = asyncio.Task(
        self._watch_invcollector(block_validator))
def test_BlockHandler_simple():
    """Two clients each advertise one block; all three end up holding both."""
    # create some peers
    peer1_2, peer2_1 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.2")
    peer1_3, peer3_1 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.3")
    BLOCK_LIST = make_blocks(2)

    @asyncio.coroutine
    def run_client(peers, blocks):
        # Build a fresh client, register peers, advertise the blocks it owns,
        # then poll until both blocks have landed in its store.
        chain = BlockChain()
        store = {}
        collector = InvCollector()
        handler = BlockHandler(collector, chain, store)
        for a_peer in peers:
            collector.add_peer(a_peer)
            handler.add_peer(a_peer)
        for a_block in blocks:
            collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, a_block.hash()))
            store[a_block.hash()] = a_block
        while len(store) < 2:
            yield from asyncio.sleep(0.1)
        return store

    tasks = [
        asyncio.Task(run_client([peer1_2, peer1_3], [])),
        asyncio.Task(run_client([peer2_1], BLOCK_LIST[0:1])),
        asyncio.Task(run_client([peer3_1], BLOCK_LIST[1:2])),
    ]
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait(tasks, timeout=5.0))
    assert len(done) == 3
    assert len(pending) == 0
    while done:
        assert len(done.pop().result()) == 2
def run_peer2():
    """Fetch six txs through one Fetcher: three sequentially, three concurrently."""
    results = []
    yield from standards.initial_handshake(peer2, VERSION_MSG_2)
    fetcher = Fetcher(peer2)
    # first fetch carries an explicit timeout; the next two use the default
    fetched = yield from fetcher.fetch(mi(TX_LIST[0].hash()), timeout=5)
    results.append(fetched)
    fetched = yield from fetcher.fetch(mi(TX_LIST[1].hash()))
    results.append(fetched)
    fetched = yield from fetcher.fetch(mi(TX_LIST[2].hash()))
    results.append(fetched)
    # issue three fetches concurrently so the Fetcher can batch them
    concurrent = [asyncio.Task(fetcher.fetch(mi(TX_LIST[i].hash())))
                  for i in (3, 4, 5)]
    yield from asyncio.wait(concurrent)
    results.extend(task.result() for task in concurrent)
    return results
def run_local_peer(peer_list, r):
    """Fetch the first ten advertised inv items, appending each result to r."""
    collector = InvCollector()
    for a_peer in peer_list:
        collector.add_peer(a_peer)
    queue = collector.new_inv_item_queue()

    @asyncio.coroutine
    def _do_fetch(collector, item, out):
        # fetch one item with a per-peer timeout; record it only on success
        obj = yield from collector.fetch(item, peer_timeout=3.0)
        if obj:
            out.append(obj)

    # keep a strong reference to these tasks
    tasks = []
    for _ in range(10):
        item = yield from queue.get()
        tasks.append(asyncio.Task(_do_fetch(collector, item, r)))
    while len(r) < 10:
        yield from asyncio.sleep(0.1)
    return r
def items_for_client(initial_blocks=None):
    """Assemble the standard client plumbing around a fresh block chain.

    initial_blocks: optional list of blocks to pre-load; each is advertised,
        stored, and added to the chain as headers.
    Returns a (block_handler, block_chain, block_store, add_peer) tuple; the
    add_peer callable also carries the background block_getter task.
    """
    if initial_blocks is None:
        # sentinel instead of a mutable default argument (shared across calls)
        initial_blocks = []
    block_store = {}
    block_chain = BlockChain()
    blockfetcher = Blockfetcher()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    fast_forward_add_peer = fast_forwarder_add_peer_f(block_chain)
    for block in initial_blocks:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(initial_blocks)
    inv_q = inv_collector.new_inv_item_queue()
    ap = make_add_peer(fast_forward_add_peer, blockfetcher, block_handler,
                       inv_collector, block_chain, block_store)
    # stash the getter task on the returned object so a strong reference survives
    ap.block_getter_task = asyncio.Task(
        block_getter(inv_q, inv_collector, block_handler, block_chain, block_store))
    return block_handler, block_chain, block_store, ap
def block_getter(inv_q, inv_collector, block_handler, block_chain, block_store):
    """Consume block inv items from inv_q and fetch each unseen block.

    Runs until a None sentinel is read from inv_q. Non-block items and blocks
    already present in block_store are skipped.
    """
    @asyncio.coroutine
    def fetch_block(inv_item):
        block = yield from inv_collector.fetch(inv_item)
        if block:
            logging.debug("fetched %s", block)
            block_chain.add_headers([block])
            block_handler.add_block(block)
            block_store[block.hash()] = block

    # Hold strong references to in-flight fetch tasks; the event loop only
    # keeps weak references, so an unanchored Task can be garbage-collected
    # before it completes. Completed tasks remove themselves from the set.
    fetch_tasks = set()
    while True:
        inv_item = yield from inv_q.get()
        if inv_item is None:
            break  # sentinel: shut down the getter
        if inv_item.item_type != ITEM_TYPE_BLOCK:
            continue
        if inv_item.data in block_store:
            continue
        task = asyncio.Task(fetch_block(inv_item))
        fetch_tasks.add(task)
        task.add_done_callback(fetch_tasks.discard)
def _watch_block_chain(self, change_q, should_download_f):
    """Watch block-chain change events and download newly added blocks.

    change_q yields (add_or_remove, block_hash, block_index) tuples; only
    "add" events for blocks missing from self.block_store are acted on, and
    only when should_download_f(block_hash, block_index) approves.
    """
    # this is only useful when fast-forwarding
    # we will skip it for now
    # TODO: implement
    def _download_block(block_hash, block_index):
        # generator-based coroutine: fetch one block via the inv collector
        block = yield from self.inv_collector.fetch(
            InvItem(ITEM_TYPE_BLOCK, block_hash))
        return block
    while True:
        add_or_remove, block_hash, block_index = yield from change_q.get()
        if add_or_remove != "add":
            # removals (reorgs?) are ignored here — TODO confirm that is intended
            continue
        block = self.block_store.get(block_hash)
        if block:
            # already stored; nothing to fetch
            continue
        if should_download_f(block_hash, block_index):
            # BRAIN DAMAGE: we have to put the task somewhere smart
            # NOTE(review): each new download overwrites self._download_task,
            # dropping the strong reference to any still-running previous task
            self._download_task = asyncio.Task(
                _download_block(block_hash, block_index))
def test_simple_getheader():
    """A peer serving 20 blocks answers a getheaders request with all of them."""
    BLOCKS = make_blocks(20)
    # (removed dead code: an unused BlockChain and a block_store dict that
    # were immediately discarded/shadowed below)
    peer1, peer2 = create_handshaked_peers()
    block_store = {}
    block_chain = BlockChain()
    inv_collector = InvCollector()
    block_handler = BlockHandler(inv_collector, block_chain, block_store)
    for block in BLOCKS:
        inv_collector.advertise_item(InvItem(ITEM_TYPE_BLOCK, block.hash()))
        block_store[block.hash()] = block
    block_chain.add_headers(BLOCKS)
    inv_collector.add_peer(peer1)
    block_handler.add_peer(peer1)

    @asyncio.coroutine
    def run_peer2():
        # request headers starting from the zero hash (i.e. from the genesis)
        r = []
        headers = yield from standards.get_headers_hashes(
            peer2, after_block_hash=b'\0' * 32)
        r.append(headers)
        return r

    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f2]))
    r = f2.result()
    assert len(r) == 1
    assert [b.hash() for b in r[0]] == [b.hash() for b in BLOCKS]
def test_TxCollector_retry():
    """A tx dropped (neither sent nor notfound) by one peer is retried via another.

    peer2 serves only 1 of the 10 txs, notfounds 8, and silently drops 1;
    peer3 (slower to advertise) serves all 10, so every fetch eventually succeeds.
    """
    # create some peers
    peer1_2, peer2 = create_handshaked_peers()
    peer1_3, peer3 = create_handshaked_peers(ip1="127.0.0.1", ip2="127.0.0.3")
    TX_LIST = [make_tx(i) for i in range(10)]
    TX_LIST.sort(key=lambda tx: tx.id())

    @asyncio.coroutine
    def run_remote_peer(peer, txs, in_db_count, delay):
        # This peer advertises all ten transactions after `delay` seconds, but
        # only has the first `in_db_count` in its db. Missing items are
        # answered with "notfound" — except that when all nine are missing,
        # one is silently dropped to force the collector's retry path.
        yield from asyncio.sleep(delay)
        tx_db = dict((tx.hash(), tx) for tx in txs[:in_db_count])
        r = []
        inv_items = [InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs]
        peer.send_msg("inv", items=inv_items)
        next_message = peer.new_get_next_message_f()
        while True:
            t = yield from next_message()
            r.append(t)
            if t[0] == 'getdata':
                found = []
                not_found = []
                yield from asyncio.sleep(0.1)
                for inv_item in t[1]["items"]:
                    if inv_item.data in tx_db:
                        found.append(tx_db[inv_item.data])
                    else:
                        not_found.append(inv_item)
                if not_found:
                    if len(not_found) == 9:
                        # drop one: it gets neither a tx nor a notfound reply
                        not_found = not_found[:8]
                    peer.send_msg("notfound", items=not_found)
                for tx in found:
                    peer.send_msg("tx", tx=tx)
        return r

    @asyncio.coroutine
    def run_local_peer(peer_list, r):
        inv_collector = InvCollector()
        for peer in peer_list:
            inv_collector.add_peer(peer)
        inv_item_q = inv_collector.new_inv_item_queue()

        @asyncio.coroutine
        def _do_fetch(inv_collector, inv_item, r):
            v = yield from inv_collector.fetch(inv_item, peer_timeout=3.0)
            if v:
                r.append(v)

        # keep a strong reference to these tasks
        tasks = []
        for i in range(10):
            inv_item = yield from inv_item_q.get()
            tasks.append(asyncio.Task(_do_fetch(inv_collector, inv_item, r)))
        while len(r) < 10:
            yield from asyncio.sleep(0.1)
        return r

    f2 = asyncio.Task(run_remote_peer(peer2, TX_LIST, 1, 0.2))
    f3 = asyncio.Task(run_remote_peer(peer3, TX_LIST, 10, 1.0))
    r = []
    f = asyncio.Task(run_local_peer([peer1_2, peer1_3], r))
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait([f], timeout=7.5))
    assert len(done) == 1
    # (removed commented-out dead code; `r` is the shared list mutated above)
    assert len(r) == 10
    assert set(tx.hash() for tx in r) == set(tx.hash() for tx in TX_LIST)
def test_TxCollector_notfound():
    """Txs answered with "notfound" yield nothing; only the five in the db arrive."""
    peer1_2, peer2 = create_handshaked_peers()
    TX_LIST = [make_tx(i) for i in range(10)]

    @asyncio.coroutine
    def run_peer_2(peer, txs):
        # This peer advertises all ten transactions at once, but only holds
        # the last five in its db; getdata for the first five is answered
        # with "notfound".
        # NOTE(review): the original comment described a sleep/re-advertise
        # sequence this code never performed; it has been corrected.
        next_message = peer.new_get_next_message_f()
        tx_db = dict((tx.hash(), tx) for tx in txs[5:])
        r = []
        inv_items = [InvItem(ITEM_TYPE_TX, tx.hash()) for tx in txs]
        peer.send_msg("inv", items=inv_items)
        while True:  # was `while 1`
            t = yield from next_message()
            r.append(t)
            if t[0] == 'getdata':
                found = []
                not_found = []
                for inv_item in t[1]["items"]:
                    if inv_item.data in tx_db:
                        found.append(tx_db[inv_item.data])
                    else:
                        not_found.append(inv_item)
                if not_found:
                    peer.send_msg("notfound", items=not_found)
                for tx in found:
                    peer.send_msg("tx", tx=tx)
        return r

    @asyncio.coroutine
    def run_local_peer(peer_list):
        inv_collector = InvCollector()
        for peer in peer_list:
            inv_collector.add_peer(peer)
        r = []
        inv_item_q = inv_collector.new_inv_item_queue()
        while len(r) < 5:
            yield from asyncio.sleep(0.1)
            inv_item = yield from inv_item_q.get()
            try:
                # short per-item timeout so notfound items simply time out
                v = yield from asyncio.wait_for(inv_collector.fetch(inv_item),
                                                timeout=0.5)
                if v:
                    r.append(v)
            except asyncio.TimeoutError:
                pass
        return r

    f2 = asyncio.Task(run_peer_2(peer2, TX_LIST))
    f = asyncio.Task(run_local_peer([peer1_2]))
    done, pending = asyncio.get_event_loop().run_until_complete(
        asyncio.wait([f], timeout=7.5))
    # guard: a timeout used to surface as an opaque KeyError from done.pop()
    assert len(done) == 1
    r = done.pop().result()
    assert len(r) == 5
    assert [tx.hash() for tx in r] == [tx.hash() for tx in TX_LIST[5:]]
def test_fetcher():
    """Exercise Fetcher against tx and notfound replies, singly and batched.

    peer2 fetches six txs; peer1 answers some with the tx, some with
    "notfound" (which must resolve the fetch to None), including one batched
    getdata covering three txs at once.
    """
    peer1, peer2 = create_peers()
    TX_LIST = [make_tx(i) for i in range(100)]
    from pycoinnet.message import pack_from_data
    # smoke-test that the message types used below serialize without raising
    pack_from_data("tx", tx=TX_LIST[0])
    item1 = [InvItem(1, TX_LIST[0].hash())]
    item2 = [InvItem(1, TX_LIST[i].hash()) for i in range(2)]
    pack_from_data("getdata", items=item1)
    pack_from_data("getdata", items=item2)
    pack_from_data("notfound", items=item1)
    pack_from_data("notfound", items=item2)

    @asyncio.coroutine
    def run_peer1():
        r = []
        yield from standards.initial_handshake(peer1, VERSION_MSG)
        next_message = peer1.new_get_next_message_f()
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("tx", tx=TX_LIST[0])
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("notfound", items=[InvItem(ITEM_TYPE_TX, TX_LIST[1].hash())])
        t = yield from next_message()
        r.append(t)
        peer1.send_msg("tx", tx=TX_LIST[2])
        t = yield from next_message()
        r.append(t)
        # BUG FIX: the second entry was appended as a nested list
        # (items.append([InvItem(...)])), producing a malformed notfound
        # payload; the list must be flat InvItems for txs 3 and 5.
        items = [InvItem(ITEM_TYPE_TX, TX_LIST[3].hash()),
                 InvItem(ITEM_TYPE_TX, TX_LIST[5].hash())]
        peer1.send_msg("notfound", items=items)
        peer1.send_msg("tx", tx=TX_LIST[4])
        return r

    @asyncio.coroutine
    def run_peer2():
        r = []
        yield from standards.initial_handshake(peer2, VERSION_MSG_2)
        tx_fetcher = Fetcher(peer2)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[0].hash()), timeout=5)
        r.append(tx)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[1].hash()))
        r.append(tx)
        tx = yield from tx_fetcher.fetch(mi(TX_LIST[2].hash()))
        r.append(tx)
        # three concurrent fetches so the Fetcher batches one getdata
        f1 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[3].hash())))
        f2 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[4].hash())))
        f3 = asyncio.Task(tx_fetcher.fetch(mi(TX_LIST[5].hash())))
        yield from asyncio.wait([f1, f2, f3])
        r.append(f1.result())
        r.append(f2.result())
        r.append(f3.result())
        return r

    f1 = asyncio.Task(run_peer1())
    f2 = asyncio.Task(run_peer2())
    asyncio.get_event_loop().run_until_complete(asyncio.wait([f1, f2]))
    r = f1.result()
    assert len(r) == 4
    assert r[0] == ('getdata',
                    dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[0].hash()),)))
    assert r[1] == ('getdata',
                    dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[1].hash()),)))
    assert r[2] == ('getdata',
                    dict(items=(InvItem(ITEM_TYPE_TX, TX_LIST[2].hash()),)))
    assert r[3] == ('getdata',
                    dict(items=tuple(InvItem(ITEM_TYPE_TX, TX_LIST[i].hash())
                                     for i in range(3, 6))))
    r = f2.result()
    assert len(r) == 6
    assert r[0].hash() == TX_LIST[0].hash()
    assert r[1] is None
    assert r[2].hash() == TX_LIST[2].hash()
    assert r[3] is None
    assert r[4].hash() == TX_LIST[4].hash()
    assert r[5] is None