class ValueSpiderCrawl(SpiderCrawl):
    """Spider that crawls the DHT looking for the value stored under this key.

    On success the newest value (ordered by decoded timestamp) is returned
    and also cached at the nearest node observed that did not hold it
    (Kademlia paper, section 2.3).
    """

    def __init__(self, protocol, node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
        # keep track of the single nearest node without value - per
        # section 2.3 so we can set the key there if found
        self.nearestWithoutValue = NodeHeap(self.node, 1)

    def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        return self._find(self.protocol.callFindValue)

    def _nodesFound(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        toremove = []     # peers that did not answer; evicted from the heap below
        foundValues = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            elif response.hasValue():
                foundValues.append(response.getValue())
            else:
                # peer answered with closer nodes but no value: remember it as
                # a caching candidate and widen the search with its node list
                peer = self.nearest.getNodeById(peerid)
                self.nearestWithoutValue.push(peer)
                self.nearest.push(response.getNodeList())
        self.nearest.remove(toremove)

        if len(foundValues) > 0:
            return self._handleFoundValues(foundValues)
        if self.nearest.allBeenContacted():
            # not found!
            return None
        # keep crawling the remaining uncontacted nodes
        return self.find()

    def _handleFoundValues(self, values):
        """
        We got some values!  Exciting.  But let's make sure
        they're all the same or freak out a little bit.  Also,
        make sure we tell the nearest node that *didn't* have
        the value to store it.
        """
        # values appear to be tuples whose [1] element is the payload and
        # [2] a timestamp blob -- TODO confirm against RPCFindResponse.getValue()
        valueCounts = Counter([x[1] for x in values])
        if len(valueCounts) != 1:
            args = (self.node.long_id, str(values))
            self.log.warning("Got multiple values for key %i: %s" % args)
        # Pick the entry with the newest decoded timestamp.
        # NOTE(review): `protocol` here is a free (module-level) name, NOT
        # self.protocol -- confirm the module import provides decodeTimestamp.
        value = sorted(values, key=lambda y: protocol.decodeTimestamp(y[1], y[2]))[-1:]
        # NOTE(review): a slice is never None, so this guard is always true;
        # it merely unwraps the single-element list produced above.
        if value is not None:
            value = value[-1]
        #value = valueCounts.most_common(1)[0][0]
        peerToSaveTo = self.nearestWithoutValue.popleft()
        if peerToSaveTo is not None:
            # cache the value at the nearest node that lacked it; the
            # deferred resolves to the value itself
            d = self.protocol.callStore(peerToSaveTo, self.node.id, value)
            return d.addCallback(lambda _: value)
        return value
def test_iteration(self):
    """Iterating a capped NodeHeap yields nodes in distance order, at most maxsize of them."""
    capacity = 5
    heap = NodeHeap(mknode(intid=0), capacity)
    for intid in range(10):
        heap.push(mknode(intid=intid))
    for position, member in enumerate(heap):
        self.assertEqual(position, member.long_id)
        self.assertTrue(position < capacity)
def test_iteration(self, mknode):  # pylint: disable=no-self-use
    """Iteration yields the closest nodes in order, capped at the heap's maxsize."""
    heap = NodeHeap(mknode(intid=0), 5)
    for intid in range(10):
        heap.push(mknode(intid=intid))
    for position, member in enumerate(heap):
        assert position == member.long_id
        assert position < 5
class ValueSpiderCrawl(SpiderCrawl):
    """Spider that crawls the DHT for the value stored under this key,
    caching it at the nearest observed node that did not hold it."""

    def __init__(self, protocol, node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
        # keep track of the single nearest node without value - per
        # section 2.3 so we can set the key there if found
        self.nearestWithoutValue = NodeHeap(self.node, 1)

    def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        return self._find(self.protocol.callFindValue)

    def _nodesFound(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        toremove = []     # unresponsive peers, evicted from the heap below
        foundValues = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            elif response.hasValue():
                foundValues.append(response.getValue())
            else:
                # no value here: remember the peer as a caching candidate and
                # extend the search frontier with the nodes it returned
                peer = self.nearest.getNodeById(peerid)
                self.nearestWithoutValue.push(peer)
                self.nearest.push(response.getNodeList())
        self.nearest.remove(toremove)

        if len(foundValues) > 0:
            return self._handleFoundValues(foundValues)
        if self.nearest.allBeenContacted():
            # not found!
            return None
        # recurse: query the next batch of uncontacted nodes
        return self.find()

    def _handleFoundValues(self, values):
        """
        We got some values!  Exciting.  But let's make sure
        they're all the same or freak out a little bit.  Also,
        make sure we tell the nearest node that *didn't* have
        the value to store it.
        """
        valueCounts = Counter(values)
        if len(valueCounts) != 1:
            # conflicting replicas: log it and fall through to majority vote
            args = (self.node.long_id, str(values))
            self.log.warning("Got multiple values for key %i: %s" % args)
        value = valueCounts.most_common(1)[0][0]
        peerToSaveTo = self.nearestWithoutValue.popleft()
        if peerToSaveTo is not None:
            # store at the nearest non-holder; the deferred resolves to the
            # value itself, with failures routed through onError
            d = self.protocol.callStore(peerToSaveTo, self.node.id, value)
            d.addCallback(lambda _: value)
            d.addErrback(self.onError)
            return d
        return value
def test_max_size(self):
    """A NodeHeap never reports or yields more than its configured maxsize."""
    heap = NodeHeap(mknode(intid=0), 3)
    self.assertEqual(0, len(heap))
    for intid in range(10):
        heap.push(mknode(intid=intid))
    self.assertEqual(3, len(heap))
    self.assertEqual(3, len(list(heap)))
def test_maxSize(self):
    """The heap caps both len() and iteration at its maxsize."""
    heap = NodeHeap(mknode(intid=0), 3)
    self.assertEqual(0, len(heap))
    for value in range(10):
        heap.push(mknode(intid=value))
    self.assertEqual(3, len(heap))
    self.assertEqual(3, len(list(heap)))
def test_max_size(self, mknode):  # pylint: disable=no-self-use
    """Pushing past maxsize leaves exactly maxsize nodes in the heap."""
    heap = NodeHeap(mknode(intid=0), 3)
    assert not heap
    for intid in range(10):
        heap.push(mknode(intid=intid))
    assert len(heap) == 3
    assert len(list(heap)) == 3
class ValueSpiderCrawl(SpiderCrawl):
    """Spider that walks the network looking for the value stored under a key."""

    def __init__(self, protocol, node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
        # Per section 2.3 of the Kademlia paper: remember the single closest
        # node that did NOT hold the value, so we can cache the key there.
        self.nearest_without_value = NodeHeap(self.node, 1)

    async def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        return await self._find(self.protocol.call_find_value)

    async def _nodes_found(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        unresponsive = []
        values = []
        for peerid, raw in responses.items():
            reply = RPCFindResponse(raw)
            if not reply.happened():
                unresponsive.append(peerid)
                continue
            if reply.has_value():
                values.append(reply.get_value())
                continue
            # no value at this peer: caching candidate + widen the frontier
            self.nearest_without_value.push(self.nearest.get_node(peerid))
            self.nearest.push(reply.get_node_list())
        self.nearest.remove(unresponsive)

        if values:
            return await self._handle_found_values(values)
        if self.nearest.have_contacted_all():
            # exhausted the candidate list without finding the value
            return None
        return await self.find()

    async def _handle_found_values(self, values):
        """
        We got some values!  Exciting.  But let's make sure
        they're all the same or freak out a little bit.  Also,
        make sure we tell the nearest node that *didn't* have
        the value to store it.
        """
        counts = Counter(values)
        if len(counts) != 1:
            log.warning("Got multiple values for key %i: %s",
                        self.node.long_id, str(values))
        winner = counts.most_common(1)[0][0]
        cache_target = self.nearest_without_value.popleft()
        if cache_target:
            await self.protocol.call_store(cache_target, self.node.id, winner)
        return winner
def test_remove(self):
    """Removed ids disappear and the heap backfills up to maxsize from the rest."""
    heap = NodeHeap(mknode(intid=0), 5)
    nodes = [mknode(intid=x) for x in range(10)]
    for node in nodes:
        heap.push(node)
    removed = [nodes[0].id, nodes[1].id]
    heap.remove(removed)
    self.assertEqual(len(list(heap)), 5)
    for offset, member in enumerate(heap):
        self.assertEqual(offset + 2, member.long_id)
        self.assertTrue(offset < 5)
def __init__(self, protocol, http_client, node, chunk_key, peers, ksize, alpha,
             time_keeper=None):
    """Create a chunk-fetching spider.

    Args:
        protocol: the Talos Kademlia protocol instance used for RPCs.
        http_client: client object stored on the instance for later use.
        node: a node representing the key we're looking for.
        chunk_key: key of the chunk to retrieve.
        peers: entry-point nodes for the crawl.
        ksize: the k parameter from the Kademlia paper.
        alpha: the alpha (parallelism) parameter from the paper.
        time_keeper: optional TimeKeeper for latency measurement; a fresh
            one is created per instance when omitted.
    """
    TalosSpiderCrawl.__init__(self, protocol, node, chunk_key, peers, ksize, alpha)
    # keep track of the single nearest node without value - per
    # section 2.3 so we can set the key there if found
    self.nearestWithoutValue = NodeHeap(self.node, 1)
    self.http_client = http_client
    # BUG FIX: the default used to be `time_keeper=TimeKeeper()`, a default
    # argument evaluated once at definition time and therefore shared by
    # every instance, corrupting timings across overlapping crawls.
    self.time_keeper = TimeKeeper() if time_keeper is None else time_keeper
    self.is_first_round = True
class SpiderCrawl(object):
    """
    Crawl the network and look for given 160-bit keys.
    """
    def __init__(self, protocol, node, peers, ksize, alpha):
        """
        Create a new C{SpiderCrawl}er.

        Args:
            protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
            node: A :class:`~kademlia.node.Node` representing the key we're looking for
            peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
            ksize: The value for k based on the paper
            alpha: The value for alpha based on the paper
        """
        self.protocol = protocol
        self.ksize = ksize
        self.alpha = alpha
        self.node = node
        # k-bounded heap of the closest nodes seen so far, keyed by
        # distance to the target node
        self.nearest = NodeHeap(self.node, self.ksize)
        # ids queried in the previous round; used to detect a stalled crawl
        self.lastIDsCrawled = []
        self.log = Logger(system=self)
        self.log.info("creating spider with peers: %s" % peers)
        self.nearest.push(peers)

    def onError(self, err):
        """Log a failure from the deferred chain; returning it keeps the
        failure propagating to downstream errbacks."""
        self.log.error(repr(err))
        return err

    def _find(self, rpcmethod):
        """
        Get either a value or list of nodes.

        Args:
            rpcmethod: The protocol's callfindValue or callFindNode.

        The process:
          1. calls find_* to current ALPHA nearest not already queried nodes,
             adding results to current nearest list of k nodes.
          2. current nearest list needs to keep track of who has been queried already
             sort by nearest, keep KSIZE
          3. if list is same as last time, next call should be to everyone not
             yet queried
          4. repeat, unless nearest list has all been queried, then ur done
        """
        self.log.info("crawling with nearest: %s" % str(tuple(self.nearest)))
        count = self.alpha
        if self.nearest.getIDs() == self.lastIDsCrawled:
            # no progress since last round: fan out to every remaining node
            self.log.info("last iteration same as current - checking all in list now")
            count = len(self.nearest)
        self.lastIDsCrawled = self.nearest.getIDs()
        ds = {}
        for peer in self.nearest.getUncontacted()[:count]:
            ds[peer.id] = rpcmethod(peer, self.node)
            self.nearest.markContacted(peer)
        # gather every outstanding RPC, then process the batch as one round
        d = deferredDict(ds)
        d.addCallback(self._nodesFound)
        d.addErrback(self.onError)
        return d
def __init__(self, protocol, prefix, lowest_node, highest_node, peers, ksize, alpha):
    """Create a range-query spider.

    Args:
        protocol: protocol instance used for the RPC calls.
        prefix: key prefix passed to the range-query RPC.
        lowest_node: node marking the lower bound of the range; also the
            reference point the query heap orders by.
        highest_node: node marking the upper bound of the range.
        peers: entry-point nodes for the crawl.
        ksize: the k parameter from the paper.
        alpha: the alpha (parallelism) parameter from the paper.
    """
    SpiderCrawl.__init__(self, protocol, lowest_node, peers, ksize, alpha)
    self.prefix = prefix
    self.lowest_node = lowest_node
    self.highest_node = highest_node
    # True while still in the node-discovery phase (before range queries)
    self.get_more_nodes = True
    # effectively unbounded heap of candidates to send range queries to
    self.nodesToQuery = NodeHeap(self.lowest_node, 1000000)
    self.nodesToQuery.push(peers)
    self.foundValues = []       # values accumulated across all responses
    self.lastNodesQueried = []  # ids queried last round, to detect convergence
class SpiderCrawl:
    """
    Crawl the network and look for given 160-bit keys.
    """
    def __init__(self, protocol, node, peers, ksize, alpha):
        """
        Create a new C{SpiderCrawl}er.

        Args:
            protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
            node: A :class:`~kademlia.node.Node` representing the key we're looking for
            peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
            ksize: The value for k based on the paper
            alpha: The value for alpha based on the paper
        """
        self.protocol = protocol
        self.ksize = ksize
        self.alpha = alpha
        self.node = node
        # k-bounded heap of the closest nodes found so far, ordered by
        # distance to the target node
        self.nearest = NodeHeap(self.node, self.ksize)
        # node ids queried in the previous round; used to detect a stalled crawl
        self.last_ids_crawled = []
        log.info("creating spider with peers: %s", peers)
        self.nearest.push(peers)

    async def _find(self, rpcmethod):
        """
        Get either a value or list of nodes.

        Args:
            rpcmethod: The protocol's callfindValue or call_find_node.

        The process:
          1. calls find_* to current ALPHA nearest not already queried nodes,
             adding results to current nearest list of k nodes.
          2. current nearest list needs to keep track of who has been queried
             already sort by nearest, keep KSIZE
          3. if list is same as last time, next call should be to everyone not
             yet queried
          4. repeat, unless nearest list has all been queried, then ur done
        """
        log.info("crawling network with nearest: %s", str(tuple(self.nearest)))
        count = self.alpha
        if self.nearest.get_ids() == self.last_ids_crawled:
            # no new nodes since last round: query everyone still uncontacted
            count = len(self.nearest)
        self.last_ids_crawled = self.nearest.get_ids()
        dicts = {}
        for peer in self.nearest.get_uncontacted()[:count]:
            dicts[peer.id] = rpcmethod(peer, self.node)
            self.nearest.mark_contacted(peer)
        # await every outstanding RPC, then let the subclass process the batch
        found = await gather_dict(dicts)
        return await self._nodes_found(found)

    async def _nodes_found(self, responses):
        """Process one round of responses; implemented by subclasses."""
        raise NotImplementedError
def __init__(self, protocol, node, peers, ksize, alpha):
    """
    Create a new C{SpiderCrawl}er.

    Args:
        protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
        node: A :class:`~kademlia.node.Node` representing the key we're looking for
        peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
        ksize: The value for k based on the paper
        alpha: The value for alpha based on the paper
    """
    self.protocol = protocol
    self.ksize = ksize
    self.alpha = alpha
    self.node = node
    # k-bounded heap of the closest nodes seen so far
    self.nearest = NodeHeap(self.node, self.ksize)
    # ids queried in the previous round; lets the crawl detect a stall
    self.lastIDsCrawled = []
    self.log = Logger(system=self)
    self.log.info("creating spider with peers: %s" % peers)
    self.nearest.push(peers)
class SpiderCrawl(object):
    """
    Crawl the network and look for given 160-bit keys.
    """
    def __init__(self, protocol, node, peers, ksize, alpha):
        """
        Create a new C{SpiderCrawl}er.

        Args:
            protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
            node: A :class:`~kademlia.node.Node` representing the key we're looking for
            peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
            ksize: The value for k based on the paper
            alpha: The value for alpha based on the paper
        """
        self.protocol = protocol
        self.ksize = ksize
        self.alpha = alpha
        self.node = node
        # k-bounded heap of the closest nodes seen so far
        self.nearest = NodeHeap(self.node, self.ksize)
        # ids queried in the previous round; used to detect a stalled crawl
        self.lastIDsCrawled = []
        self.log = Logger(system=self)
        self.log.info("creating spider with peers: %s" % peers)
        self.nearest.push(peers)

    def _find(self, rpcmethod):
        """
        Get either a value or list of nodes.

        Args:
            rpcmethod: The protocol's callfindValue or callFindNode.

        The process:
          1. calls find_* to current ALPHA nearest not already queried nodes,
             adding results to current nearest list of k nodes.
          2. current nearest list needs to keep track of who has been queried
             already sort by nearest, keep KSIZE
          3. if list is same as last time, next call should be to everyone not
             yet queried
          4. repeat, unless nearest list has all been queried, then ur done
        """
        self.log.info("crawling with nearest: %s" % str(tuple(self.nearest)))
        count = self.alpha
        if self.nearest.getIDs() == self.lastIDsCrawled:
            # no progress since last round: fan out to every remaining node
            self.log.info(
                "last iteration same as current - checking all in list now")
            count = len(self.nearest)
        self.lastIDsCrawled = self.nearest.getIDs()
        ds = {}
        for peer in self.nearest.getUncontacted()[:count]:
            ds[peer.id] = rpcmethod(peer, self.node)
            self.nearest.markContacted(peer)
        # gather every outstanding RPC, then process the batch as one round
        return deferredDict(ds).addCallback(self._nodesFound)
class ValueSpiderCrawl(SpiderCrawl):
    """Spider that collects values for a key, stopping once enough values
    have arrived (Config.VALUES_TO_WAIT) or every node has been contacted."""

    def __init__(self, protocol, node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha)
        # keep track of the single nearest node without value - per
        # section 2.3 so we can set the key there if found
        self.nearestWithoutValue = NodeHeap(self.node, 1)

    async def find(self, found_values=None):
        """
        Find either the closest nodes or the value requested.

        ``found_values`` accumulates values across recursive rounds;
        external callers normally omit it.
        """
        return await self._find(self.protocol.callFindValue, found_values)

    async def _nodesFound(self, responses, found_values=None):
        """
        Handle the result of an iteration in _find.
        """
        if found_values is None:
            found_values = []
        toremove = []   # unresponsive peers, evicted from the heap below
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            elif response.hasValue():
                # NOTE(review): values failing isValid() are dropped silently;
                # confirm that is intended (invalid replicas are not logged).
                if response.isValid(self.node.id):
                    found_values.append(response.getValue())
            else:
                # no value at this peer: caching candidate, and its node list
                # widens the search frontier
                peer = self.nearest.getNodeById(peerid)
                self.nearestWithoutValue.push(peer)
                self.nearest.push(response.getNodeList())
        self.nearest.remove(toremove)

        # stop once we have enough values or everyone has been asked
        if len(found_values) >= Config.VALUES_TO_WAIT or self.nearest.allBeenContacted():
            return found_values
        return await self.find(found_values)
def test_remove(self, mknode):  # pylint: disable=no-self-use
    """After removing two ids, the heap backfills to maxsize with the next nearest nodes."""
    heap = NodeHeap(mknode(intid=0), 5)
    nodes = [mknode(intid=x) for x in range(10)]
    for node in nodes:
        heap.push(node)
    heap.remove([nodes[0].id, nodes[1].id])
    contents = list(heap)
    assert len(contents) == 5
    for offset, member in enumerate(contents):
        assert offset + 2 == member.long_id
        assert offset < 5
def _nodesFound(self, responses):
    """
    Handle the result of an iteration in _find.
    """
    if self.get_more_nodes:
        # Discovery phase: harvest node lists from the responses, then flip
        # to the range-query phase and immediately start another round.
        toremove = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            else:
                self.nearest.push(response.getNodeList())
                self.nodesToQuery.push(response.getNodeList())
        self.nearest.remove(toremove)
        self.nodesToQuery.remove(toremove)
        self.get_more_nodes = False
        return self.find()
    else:
        # Range-query phase: accumulate returned values and any new
        # neighbors, then decide whether to stop or keep crawling.
        toremove = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            else:
                values = response.getValues()
                if values is not None:
                    for value in values:
                        self.foundValues.append(value)
                neighbors = response.getNeighbors()
                for neighbor in neighbors:
                    # neighbor assumed to be an (id, ip, port) triple --
                    # TODO confirm against getNeighbors()
                    n = Node(neighbor[0], neighbor[1], neighbor[2])
                    self.nodesToQuery.push(n)
        if self.nodesToQuery.allBeenContacted() and len(self.foundValues) == 0:
            return None
        elif self.nodesToQuery.allBeenContacted():
            return self._handleFoundValues(self.foundValues)
        self.nodesToQuery.remove(toremove)
        # Re-anchor the query heap on the lowest uncontacted node still inside
        # the (lowest, highest) range, then rebuild it around that anchor.
        # NOTE(review): `self.highest_node > node.id` compares a Node against a
        # raw id -- relies on Node defining ordering against ints; confirm.
        temp = self.nodesToQuery.getUncontacted()
        for node in temp:
            self.nodesToQuery.push(node)
            if self.highest_node > node.id > self.lowest_node.id:
                self.lowest_node = node
        self.nodesToQuery = NodeHeap(self.lowest_node, 1000000)
        for node in temp:
            self.nodesToQuery.push(node)
        return self.find()
def __init__(self, protocol, node, peers, ksize, alpha):
    """
    Create a new C{SpiderCrawl}er.

    @param protocol: a C{KademliaProtocol} instance.
    @param node: A C{Node} representing the key we're looking for
    @param peers: A list of C{Node}s that provide the entry point for the network
    @param ksize: The value for k based on the paper
    @param alpha: The value for alpha based on the paper
    """
    self.protocol = protocol
    self.ksize = ksize
    self.alpha = alpha
    self.node = node
    # k-bounded heap of the closest nodes seen so far
    self.nearest = NodeHeap(self.node, self.ksize)
    # ids queried in the previous round; lets the crawl detect a stall
    self.lastIDsCrawled = []
    self.log = Logger(system=self)
    self.log.info("creating spider with peers: %s" % peers)
    self.nearest.push(peers)
class RangeSpiderCrawl(SpiderCrawl):
    """Crawl the network for every value stored under a given key prefix.

    Runs in two phases: first a normal node crawl (while ``get_more_nodes``
    is True) to seed ``nodesToQuery``, then repeated ``callFindRange`` RPCs
    against every uncontacted candidate, accumulating results in
    ``foundValues``.
    """

    def __init__(self, protocol, prefix, lowest_node, highest_node, peers, ksize, alpha):
        SpiderCrawl.__init__(self, protocol, lowest_node, peers, ksize, alpha)
        self.prefix = prefix
        self.lowest_node = lowest_node
        self.highest_node = highest_node
        # True while still in the node-discovery phase (before range queries)
        self.get_more_nodes = True
        # effectively unbounded heap of candidates for the range queries
        self.nodesToQuery = NodeHeap(self.lowest_node, 1000000)
        self.nodesToQuery.push(peers)
        self.foundValues = []       # values accumulated across all responses
        self.lastNodesQueried = []  # ids queried last round, to detect stalls

    def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        if self.get_more_nodes:
            # discovery phase: plain node lookup through the base class
            return self._find(self.protocol.callFindNode)
        self.log.info("crawling with nearest: %s" % str(tuple(self.nodesToQuery)))
        count = self.alpha
        if self.nodesToQuery.getIDs() == self.lastNodesQueried:
            self.log.info("last iteration same as current - checking all in list now")
            # BUG FIX: this previously read len(self.nearest), which is capped
            # at ksize; the loop below iterates nodesToQuery, so the
            # "query everyone" fallback must be sized from nodesToQuery.
            count = len(self.nodesToQuery)
        self.lastNodesQueried = self.nodesToQuery.getIDs()
        ds = {}
        for peer in self.nodesToQuery.getUncontacted()[:count]:
            ds[peer.id] = self.protocol.callFindRange(peer, self.prefix)
            self.nodesToQuery.markContacted(peer)
        return deferredDict(ds).addCallback(self._nodesFound)

    def _nodesFound(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        if self.get_more_nodes:
            # Discovery phase: harvest node lists, then flip to the
            # range-query phase and start another round immediately.
            toremove = []
            for peerid, response in responses.items():
                response = RPCFindResponse(response)
                if not response.happened():
                    toremove.append(peerid)
                else:
                    self.nearest.push(response.getNodeList())
                    self.nodesToQuery.push(response.getNodeList())
            self.nearest.remove(toremove)
            self.nodesToQuery.remove(toremove)
            self.get_more_nodes = False
            return self.find()

        # Range-query phase: accumulate values and newly-learned neighbors.
        toremove = []
        for peerid, response in responses.items():
            response = RPCFindResponse(response)
            if not response.happened():
                toremove.append(peerid)
            else:
                values = response.getValues()
                if values is not None:
                    self.foundValues.extend(values)
                for neighbor in response.getNeighbors():
                    self.nodesToQuery.push(Node(neighbor[0], neighbor[1], neighbor[2]))
        if self.nodesToQuery.allBeenContacted() and not self.foundValues:
            return None
        if self.nodesToQuery.allBeenContacted():
            return self._handleFoundValues(self.foundValues)
        self.nodesToQuery.remove(toremove)
        # Re-anchor the heap on the lowest uncontacted node still inside the
        # (lowest, highest) range, then rebuild it around that anchor.
        # NOTE(review): `self.highest_node > node.id` compares a Node with a
        # raw id -- relies on Node defining ordering against ints; confirm.
        temp = self.nodesToQuery.getUncontacted()
        for node in temp:
            self.nodesToQuery.push(node)
            if self.highest_node > node.id > self.lowest_node.id:
                self.lowest_node = node
        self.nodesToQuery = NodeHeap(self.lowest_node, 1000000)
        for node in temp:
            self.nodesToQuery.push(node)
        return self.find()

    def _handleFoundValues(self, values):
        """
        We got some values! Exciting. Let's remove duplicates from our
        list before returning.
        """
        # values may be unhashable, so dedupe with a linear scan that
        # preserves first-seen order rather than via a set.
        ret = []
        for value in values:
            if value not in ret:
                ret.append(value)
        return ret
def __init__(self, protocol, node, peers, ksize, alpha): SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha) # keep track of the single nearest node without value - per # section 2.3 so we can set the key there if found self.nearest_without_value = NodeHeap(self.node, 1)
class TalosChunkSpiderCrawl(TalosSpiderCrawl):
    """Spider that looks up a single chunk's value, timing the lookup."""

    def __init__(self, protocol, http_client, node, chunk_key, peers, ksize, alpha,
                 time_keeper=None):
        """Create a chunk-fetching spider.

        Args:
            protocol: the Talos Kademlia protocol instance used for RPCs.
            http_client: client object stored on the instance for later use.
            node: a node representing the key we're looking for.
            chunk_key: key of the chunk to retrieve.
            peers: entry-point nodes for the crawl.
            ksize: the k parameter from the Kademlia paper.
            alpha: the alpha (parallelism) parameter from the paper.
            time_keeper: optional TimeKeeper for latency measurement; a
                fresh one is created per instance when omitted.
        """
        TalosSpiderCrawl.__init__(self, protocol, node, chunk_key, peers, ksize, alpha)
        # keep track of the single nearest node without value - per
        # section 2.3 so we can set the key there if found
        self.nearestWithoutValue = NodeHeap(self.node, 1)
        self.http_client = http_client
        # BUG FIX: the default used to be `time_keeper=TimeKeeper()`, a default
        # argument evaluated once at definition time and therefore shared by
        # every instance, corrupting timings across overlapping crawls.
        self.time_keeper = TimeKeeper() if time_keeper is None else time_keeper
        self.is_first_round = True

    def find(self):
        """
        Find either the closest nodes or the value requested.
        """
        # start the latency clock on the first round only
        if self.is_first_round:
            self.time_keeper.start_clock()
            self.is_first_round = False
        return self._find_value(self.protocol.callFindValue)

    def _nodesFound(self, responses):
        """
        Handle the result of an iteration in _find.
        """
        toremove = []     # unresponsive peers, evicted from the heap below
        foundValues = []
        for peerid, response in responses.items():
            response = TalosRPCFindValueResponse(response)
            if not response.happened():
                toremove.append(peerid)
            elif response.hasValue():
                foundValues.append(response.getValue())
            elif response.hasError():
                # a node reported an explicit error: abort and surface it
                return response.getError()
            else:
                # no value here: caching candidate plus a wider frontier
                peer = self.nearest.getNodeById(peerid)
                self.nearestWithoutValue.push(peer)
                self.nearest.push(response.getNodeList())
        self.nearest.remove(toremove)

        if foundValues:
            return self._handleFoundValues(foundValues)
        if self.nearest.allBeenContacted():
            # not found!
            return None
        return self.find()

    def _handleFoundValues(self, values):
        """
        We got some values! Exciting. But let's make sure they're all the
        same or freak out a little bit. Also, make sure we tell the nearest
        node that *didn't* have the value to store it.
        """
        self.time_keeper.stop_clock(ENTRY_TIME_FIND_VALUE)
        valueCounts = Counter(values)
        if len(valueCounts) != 1:
            args = (self.node.long_id, str(values))
            self.log.warning("Got multiple values for key %i: %s" % args)
        value = valueCounts.most_common(1)[0][0]
        peerToSaveTo = self.nearestWithoutValue.popleft()
        # Caching the value at the nearest non-holder (Kademlia section 2.3)
        # is deliberately disabled in this system -- it "should be rethinked
        # with our approach"; we only log that a closer node exists.
        if peerToSaveTo is not None:
            self.log.warning("Got closer node %s" % peerToSaveTo)
        return value
def __init__(self, protocol, node, peers, ksize, alpha): SpiderCrawl.__init__(self, protocol, node, peers, ksize, alpha) # keep track of the single nearest node without value - per # section 2.3 so we can set the key there if found self.nearestWithoutValue = NodeHeap(self.node, 1)