async def _refresh_table(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    ds = []
    for node_id in self.protocol.getRefreshIDs():
        node = Node(node_id)
        nearest = self.protocol.router.findNeighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        ds.append(spider.find())

    # do our crawling
    await asyncio.gather(*ds)

    # now republish keys older than one hour
    for dkey, value in self.storage.iteritemsOlderThan(3600):
        parsed_val = json.loads(value)
        if isinstance(parsed_val, list):
            # republish each list element individually
            values_to_republish = [json.dumps(val) for val in parsed_val]
        else:
            values_to_republish = [value]
        for val in values_to_republish:
            await self._call_remote_persist(dkey, val)
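# Scheduling sketch (hedged): refresh coroutines like the one above are
# normally re-armed on a timer rather than called manually. This mirrors the
# common kademlia-library pattern; the attribute name refresh_loop is an
# assumption, not taken from the snippet above.
def _schedule_refresh(self):
    # kick off the refresh in the background, then re-arm for an hour from now
    asyncio.ensure_future(self._refresh_table())
    loop = asyncio.get_event_loop()
    self.refresh_loop = loop.call_later(3600, self._schedule_refresh)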
def initTable(results, challenge, id):
    nodes = []
    for addr, result in results.items():
        ip = addr[0]
        port = addr[1]
        if result[0]:
            resultId = result[1]['id']
            resultIdHex = resultId.encode('hex').upper()
            resultSign = result[1]['signature']
            data = self.protocol.certificateExists(resultIdHex)
            if not data:
                identifier = "{}cert".format(resultIdHex)
                self.protocol.callCertFindValue(Node(resultId, ip, port),
                                                Node(identifier))
            else:
                cert_stored = self.protocol.searchForCertificate(resultIdHex)
                try:
                    OpenSSL.crypto.verify(cert_stored, resultSign, challenge, "sha256")
                except Exception:
                    traceback.print_exc()
                nodes.append(Node(resultId, ip, port))
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
    return spider.find()
def send_message(self, nodeid, message):
    """
    Send a message to a given node on the network.
    """
    hexid = binascii.hexlify(nodeid)
    self.log.debug("messaging '%s' '%s'" % (hexid, message))
    node = Node(nodeid)

    def found_callback(nodes):
        self.log.debug("nearest nodes %s" % list(map(str, nodes)))
        nodes = filter(lambda n: n.id == nodeid, nodes)
        if len(nodes) == 0:
            self.log.debug("couldn't find destination node")
            return defer.succeed(None)
        else:
            self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
            d = self.protocol.callMessage(nodes[0], message)
            return d.addCallback(lambda r: r[0] and r[1] or None)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to find %s" % hexid)
        return defer.succeed(None)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(found_callback)
def initTable(results):
    nodes = []
    for addr, result in results.items():
        if result[0]:
            nodes.append(Node(result[1], addr[0], addr[1]))
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
    return spider.find()
def set(self, key, value):
    """
    Set the given key to the given value in the network.
    """
    _log.debug("setting '%s' = '%s' on network" % (key, value))
    dkey = digest(key)
    node = Node(dkey)

    def store(nodes):
        _log.debug("setting '%s' to %s on %s" % (key, value, map(str, nodes)))
        # if this node is close too, then store here as well
        if (not nodes or
                self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]) or
                dkey in self.storage):
            _log.debug("setting '%s' to %s locally" % (key, value))
            self.storage[dkey] = value
        ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        _log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(store)
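# Usage sketch (hedged): set() returns a Twisted Deferred that fires True once
# any neighbor acknowledges the store (via _anyRespondSuccess). `server` is an
# assumed instance of the class the method above belongs to.
from twisted.internet import reactor

def report(ok):
    print("store succeeded" if ok else "store failed")

server.set("foo", "bar").addCallback(report)
reactor.run()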
def append(self, key, value):
    """
    Append the given value to the given key in the network.
    """
    self.log.debug("appending '%s' = '%s' on network" % (key, value))
    dkey = digest(key)

    def append(nodes, mid):
        self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
        # TODO: Must add transaction ID so we don't append multiple times.
        # The mid passed in is ignored; a fresh one is generated per call.
        self.log.debug("org mid %s" % mid)
        mid = uuid.uuid1().hex
        self.log.debug("new mid %s" % mid)
        ds = [self.protocol.callAppend(node, mid, dkey, value) for node in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(append, "hej")
def initTable(results, challenge, id):
    nodes = []
    for addr, result in results.items():
        ip = addr[0]
        port = addr[1]
        if result[0]:
            resultId = result[1]['id']
            resultIdHex = resultId.encode('hex').upper()
            resultSign = result[1]['signature']
            data = self.protocol.certificateExists(resultIdHex)
            if not data:
                identifier = "{}cert".format(resultIdHex)
                self.protocol.callCertFindValue(Node(resultId, ip, port),
                                                Node(identifier))
            else:
                cert_stored = self.protocol.searchForCertificate(resultIdHex)
                try:
                    self.runtime_credentials.verify_signed_data_from_certstring(
                        cert_stored, resultSign, challenge,
                        certificate.TRUSTSTORE_TRANSPORT)
                except Exception:
                    traceback.print_exc()
                nodes.append(Node(resultId, ip, port))
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
    return spider.find()
def set(self, key, value):
    """
    Set the given key to the given value in the network.
    """
    self.log.debug("setting '%s' = '%s' on network" % (key, value))
    dkey = digest(key)
    node = Node(dkey)

    def store(nodes):
        self.log.info("setting '%s' on %s" % (key, list(map(str, nodes))))
        # if this node is close too, then store here as well
        if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
            self.storage[dkey] = value
        ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
        d = defer.DeferredList(ds)
        d.addCallback(self._anyRespondSuccess)
        d.addErrback(self.onError)
        return d

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    d = spider.find()
    d.addCallback(store)
    d.addErrback(self.onError)
    return d
def append(self, key, value):
    """
    For the given key append the given list values to the set in the network.
    """
    dkey = digest(key)
    node = Node(dkey)

    def append_(nodes):
        # if this node is close too, then store here as well
        if not nodes or self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
            try:
                pvalue = json.loads(value)
                self.set_keys.add(dkey)
                if dkey not in self.storage:
                    _log.debug("%s local append key: %s not in storage set value: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey), pvalue))
                    self.storage[dkey] = value
                else:
                    old_value_ = self.storage[dkey]
                    old_value = json.loads(old_value_)
                    new_value = list(set(old_value + pvalue))
                    _log.debug("%s local append key: %s old: %s add: %s new: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey),
                                old_value, pvalue, new_value))
                    self.storage[dkey] = json.dumps(new_value)
            except Exception:
                _log.debug("Trying to append something not a JSON coded list %s" % value,
                           exc_info=True)
        ds = [self.protocol.callAppend(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        _log.debug("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(append_)
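# Behavior sketch: the local merge above treats each stored value as a
# JSON-encoded set, so appending duplicates collapses them; element order is
# unspecified because of the set round-trip. For example:
import json

old = json.loads('["a", "b"]')   # existing stored list
add = json.loads('["b", "c"]')   # incoming append
merged = list(set(old + add))    # union: some ordering of a, b, c
print(json.dumps(merged))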
def digest_set(self, dkey, value):
    """
    Set the given SHA1 digest key to the given value in the network.
    """
    node = Node(dkey)
    # this is useful for debugging messages
    hkey = binascii.hexlify(dkey)

    def store(nodes):
        self.log.info("setting '%s' on %s" % (hkey, map(str, nodes)))
        # if this node is close too, then store here as well
        if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
            self.storage[dkey] = value
        ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % hkey)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(store)
def direct_message(self, nodeid, message):
    """Send direct message to a node.

    Spidercrawls the network to find the node and sends the message
    directly. This will fail if the node is behind a NAT and doesn't
    have a public ip.

    Args:
        nodeid: 160bit nodeid of the receiver as bytes
        message: iu-msgpack-python serializable message data

    Returns:
        Deferred own transport address (ip, port) if successful else None
    """

    def found_callback(nodes):
        nodes = filter(lambda n: n.id == nodeid, nodes)
        if len(nodes) == 0:
            return defer.succeed(None)
        else:
            d = self.protocol.callDirectMessage(nodes[0], message)
            return d.addCallback(lambda r: r[0] and r[1] or None)

    node = KademliaNode(nodeid)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        return defer.succeed(None)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(found_callback)
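# Usage sketch (hedged): direct_message takes the raw 160-bit node id (bytes,
# not a hex string) and returns a Deferred. `server`, `peer_id_bytes`, and the
# message payload are placeholders, not names from the snippet above.
def on_reply(addr):
    if addr is None:
        print("peer unreachable (not found, or behind NAT)")
    else:
        print("peer acknowledged from %s:%s" % addr)

server.direct_message(peer_id_bytes, {"type": "ping"}).addCallback(on_reply)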
def remove(self, key, value):
    """
    For the given key remove the given list values from the set in the network.
    """
    dkey = digest(key)
    node = Node(dkey)
    _log.debug("Server:remove %s" % base64.b64encode(dkey))

    def remove_(nodes):
        # if this node is close too, then store here as well
        max_distance = max([n.distanceTo(node) for n in nodes]) if nodes else sys.maxint
        if self.node.distanceTo(node) < max_distance:
            try:
                pvalue = json.loads(value)
                self.set_keys.add(dkey)
                if dkey in self.storage:
                    old_value = json.loads(self.storage[dkey])
                    new_value = list(set(old_value) - set(pvalue))
                    self.storage[dkey] = json.dumps(new_value)
                    _log.debug("%s local remove key: %s old: %s remove: %s new: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey),
                                old_value, pvalue, new_value))
            except Exception:
                _log.debug("Trying to remove something not a JSON coded list %s" % value,
                           exc_info=True)
        ds = [self.protocol.callRemove(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(remove_)
async def _refresh_table(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    results = []
    for node_id in self.protocol.get_refresh_ids():
        node = Node(node_id)
        nearest = self.protocol.router.find_neighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        results.append(spider.find())

    # do our crawling
    await asyncio.gather(*results)
async def set_digest(self, dkey, value):
    """
    Set the given SHA1 digest key (bytes) to the given value in the network.
    """
    node = Node(dkey)

    nearest = self.protocol.router.find_neighbors(node)
    if not nearest:
        log.warning("There are no known neighbors to set key %s", dkey.hex())
        return False

    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()
    log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

    # if this node is close too, then store here as well
    biggest = max([n.distance_to(node) for n in nodes])
    if self.node.distance_to(node) < biggest:
        self.storage[dkey] = value
    results = [self.protocol.call_store(n, dkey, value) for n in nodes]
    # return true only if at least one store call succeeded
    return any(await asyncio.gather(*results))
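# Usage sketch (hedged): set_digest expects an already-hashed 160-bit key, so
# callers hash first. The hashlib derivation and `server` instance below are
# illustrative, not from the snippet above.
import asyncio
import hashlib

async def store_value(server):
    dkey = hashlib.sha1(b"my-key").digest()  # SHA1 digest, per the docstring
    ok = await server.set_digest(dkey, b"my-value")
    print("stored on at least one node:", ok)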
async def set_digest(self, dkey, value):
    """
    Set the given SHA1 digest key (bytes) to the given value in the
    network. Returns True if a digest was in fact set.
    """
    node = self.node_class(dkey)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % dkey.hex())
        return False

    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()
    self.log.info("setting '%s' on %s" % (dkey.hex(), list(map(str, nodes))))

    # if this node is close too, then store here as well
    if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
        self.storage[dkey] = value

    ds = []
    for n in nodes:
        _disposition, value_was_set = await self.protocol.callStore(n, dkey, value)
        if value_was_set:
            self.digests_set += 1
        ds.append(value_was_set)
    # return true only if at least one store call succeeded
    return any(ds)
def append(self, key, value):
    """
    For the given key append the given list values to the set in the network.
    """
    dkey = digest(key)
    node = Node(dkey)

    def append_(nodes):
        # if this node is close too, then store here as well
        if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
            try:
                pvalue = json.loads(value)
                self.set_keys.add(dkey)
                if dkey not in self.storage:
                    _log.debug("%s local append key: %s not in storage set value: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey), pvalue))
                    self.storage[dkey] = value
                else:
                    old_value_ = self.storage[dkey]
                    old_value = json.loads(old_value_)
                    new_value = list(set(old_value + pvalue))
                    _log.debug("%s local append key: %s old: %s add: %s new: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey),
                                old_value, pvalue, new_value))
                    self.storage[dkey] = json.dumps(new_value)
            except Exception:
                _log.debug("Trying to append something not a JSON coded list %s" % value,
                           exc_info=True)
        ds = [self.protocol.callAppend(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        _log.debug("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(append_)
def remove(self, key, value):
    """
    For the given key remove the given list values from the set in the network.
    """
    dkey = digest(key)

    def remove(nodes):
        ds = [self.protocol.callRemove(node, dkey, value) for node in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(remove)
async def _refresh_table(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    ds = []
    for node_id in self.protocol.getRefreshIDs():
        node = Node(node_id)
        nearest = self.protocol.router.findNeighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        ds.append(spider.find())

    # do our crawling
    await asyncio.gather(*ds)

    # now republish keys older than one hour
    for dkey, value in self.storage.iteritemsOlderThan(3600):
        await self.set_digest(dkey, value)
async def _refresh_table(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    results = []
    for node_id in self.protocol.get_refresh_ids():
        node = Node(node_id)
        nearest = self.protocol.router.find_neighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        results.append(spider.find())

    # do our crawling
    await asyncio.gather(*results)

    # now republish keys older than one hour
    for dkey, value in self.storage.iter_older_than(3600):
        await self.set_digest(dkey, value)
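# Interface sketch (hedged): the republish loop above assumes the storage
# object can enumerate entries older than a cutoff. A minimal in-memory class
# satisfying that contract might look like this; the real storage class in the
# codebase above almost certainly differs.
import time

class SimpleStorage:
    def __init__(self):
        self.data = {}  # dkey -> (insert_time, value)

    def __setitem__(self, key, value):
        self.data[key] = (time.monotonic(), value)

    def iter_older_than(self, seconds_old):
        cutoff = time.monotonic() - seconds_old
        return [(k, v) for k, (t, v) in self.data.items() if t <= cutoff]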
def set(self, key, value):
    """
    Set the given key to the given value in the network.
    """
    self.log.debug("setting '%s' = '%s' on network" % (key, value))
    dkey = digest(key)

    def store(nodes):
        self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
        ds = [self.protocol.callStore(node, dkey, value) for node in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(store)
async def _refresh_table(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    ds = []
    for node_id in self.protocol.getRefreshIDs():
        log.debug('node_id=%s type %s', node_id, type(node_id))
        node = Node(node_id)
        nearest = self.protocol.router.findNeighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        ds.append(spider.find())

    # do our crawling
    await asyncio.gather(*ds)

    # now republish keys older than one hour
    for dkey, value in self.storage.iteritemsOlderThan(3600):
        await self.set_digest(dkey, value)
def refreshTable(self):
    """
    Refresh buckets that haven't had any lookups in the last hour
    (per section 2.3 of the paper).
    """
    ds = []
    for id in self.protocol.getRefreshIDs():
        node = Node(id)
        nearest = self.protocol.router.findNeighbors(node, self.alpha)
        spider = NodeSpiderCrawl(self.protocol, node, nearest)
        ds.append(spider.find())

    def republishKeys(_):
        ds = []
        # Republish keys older than one hour
        for key, value in self.storage.iteritemsOlderThan(3600):
            ds.append(self.set(key, value))
        return defer.gatherResults(ds)

    return defer.gatherResults(ds).addCallback(republishKeys)
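# Scheduling sketch (hedged): in the Twisted variants, refreshTable is
# typically re-armed with a LoopingCall rather than called by hand. `server`
# is an assumed instance of the class the method above belongs to.
from twisted.internet import task

refresh_loop = task.LoopingCall(server.refreshTable)
refresh_loop.start(3600)  # refresh (and republish) every hour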
async def lookup_ip(self, node_key):
    node_id = digest(node_key)
    node = Node(node_id)
    nearest = self.protocol.router.findNeighbors(node)
    spider = NodeSpiderCrawl(self.protocol, self.node, nearest, self.ksize, self.alpha)
    log.debug("Starting lookup for node_key {}".format(node_key))
    res_node = await spider.find_ip(node_id=node_id)
    # a list result means the exact node was not found
    if isinstance(res_node, list):
        res_node = None
    log.debug('{} resolves to {}'.format(node_key, res_node))
    return res_node
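# Usage sketch (hedged): lookup_ip resolves a node key to a node object or
# None. The ip/port attributes on the result are assumptions based on the
# usual Node interface in these snippets, not confirmed by the code above.
async def resolve(server):
    peer = await server.lookup_ip("alice")
    if peer is None:
        print("node not found")
    else:
        print("found peer at %s:%s" % (peer.ip, peer.port))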
async def _find_neighbors_nodes(self, dkey):
    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        log.warning("There are no known neighbors to set key %s", dkey.hex())
        return False
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()
    return nodes
def direct_message(self, nodeid, message):
    """Send direct message to a node.

    Spidercrawls the network to find the node and sends the message
    directly. This will fail if the node is behind a NAT and doesn't
    have a public ip.

    Args:
        nodeid: 160bit nodeid of the receiver as bytes
        message: iu-msgpack-python serializable message data

    Returns:
        Deferred own transport address (ip, port) if successful else None
    """
    hexid = binascii.hexlify(nodeid)
    self.log.debug("Direct messaging %s: %s" % (hexid, message))

    def found_callback(nodes):
        nodes = filter(lambda n: n.id == nodeid, nodes)
        if len(nodes) == 0:
            msg = "{0} couldn't find destination node {1}"
            self.log.warning(msg.format(self.get_hex_id(), hexid))
            return defer.succeed(None)
        else:
            self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
            d = self.protocol.callDirectMessage(nodes[0], message)
            return d.addCallback(lambda r: r[0] and r[1] or None)

    node = Node(nodeid)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        msg = "{0} has no known neighbors to find {1}"
        self.log.warning(msg.format(self.get_hex_id(), hexid))
        return defer.succeed(None)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(found_callback)
def remove(self, key, value):
    """
    For the given key remove the given list values from the set in the network.
    """
    dkey = digest(key)
    node = Node(dkey)
    _log.debug("Server:remove %s" % base64.b64encode(dkey))

    def remove_(nodes):
        # if this node is close too, then store here as well
        if not nodes or self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
            try:
                pvalue = json.loads(value)
                self.set_keys.add(dkey)
                if dkey in self.storage:
                    try:
                        old_value = json.loads(self.storage[dkey])
                        new_value = list(set(old_value) - set(pvalue))
                    except Exception:
                        # When the key has been used for single values or was
                        # deleted it does not contain a list - just empty it.
                        old_value = self.storage[dkey]
                        new_value = []
                    self.storage[dkey] = json.dumps(new_value)
                    _log.debug("%s local remove key: %s old: %s remove: %s new: %s" %
                               (base64.b64encode(node.id), base64.b64encode(dkey),
                                old_value, pvalue, new_value))
            except Exception:
                _log.debug("Trying to remove something not a JSON coded list %s" % value,
                           exc_info=True)
        ds = [self.protocol.callRemove(n, dkey, value) for n in nodes]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    return spider.find().addCallback(remove_)
async def bootstrap(self, addrs):
    """
    Bootstrap the server by connecting to other known nodes in the network.

    Args:
        addrs: A `list` of (ip, port) `tuple` pairs.  Note that only IP
               addresses are acceptable - hostnames will cause an error.
    """
    log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs))
    cos = list(map(self.bootstrap_node, addrs))
    gathered = await asyncio.gather(*cos)
    nodes = [node for node in gathered if node is not None]
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
    return await spider.find()
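# Usage sketch (hedged): a typical asyncio entry point for the bootstrap
# coroutine above. Server(), listen(), the port, and the peer address follow
# the common kademlia-style API and are assumptions, not taken from this code.
import asyncio

async def main():
    server = Server()
    await server.listen(8468)
    await server.bootstrap([("123.123.123.123", 8468)])

asyncio.get_event_loop().run_until_complete(main())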
async def _call_remote_persist(self, key, value: str):
    """
    Hash the given key (SHA1) and set the resulting digest to the given
    value in the network.
    """
    dkey = digest(key)
    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        log.warning("There are no known neighbors to set key %s", dkey.hex())
        return False
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()
    log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))
    ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
    # return true only if at least one store call succeeded
    return any(await asyncio.gather(*ds))
async def set_digest(self, dkey, value):
    """
    Set the given SHA1 digest key (bytes) to the given value in the
    network. Returns True if a digest was in fact set.
    """
    node = self.node_class(dkey)

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % dkey.hex())
        return False

    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()

    while isinstance(nodes, types.CoroutineType):
        # This is awful.
        self.log.warning("Didn't get a list of nodes from spider.find(). "
                         "Got {} instead.".format(nodes))
        nodes = await nodes
        self.log.warning("Cast nodes to {}".format(nodes))

    self.log.info("setting '%s' on %s" % (dkey.hex(), list(map(str, nodes))))

    # if this node is close too, then store here as well
    if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
        self.storage[dkey] = value

    ds = []
    for n in nodes:
        if self.node.id == n.id:
            # TODO: Consider whether to store stuff locally. We don't really
            # know yet. Probably at least some things.
            ds.append(False)
        else:
            disposition, value_was_set = await self.protocol.callStore(n, dkey, value)
            if value_was_set:
                self.digests_set += 1
            ds.append(value_was_set)
    # return true only if at least one store call succeeded
    return any(ds)
async def bootstrap(self, addrs=None, flooding=False, iprange_l=None,
                    iprange_r=None, ports=None):
    """
    Bootstrap the server by connecting to other known nodes in the network.

    Args:
        addrs: A `list` of (ip, port) `tuple` pairs.  Note that only IP
               addresses are acceptable - hostnames will cause an error.
               If None, flooding must be True and an IP range must be
               provided.
        flooding: Boolean, default False when addrs is not empty.
        iprange_l, iprange_r: Str, the range of ips to flood.  E.g. if
               iprange_l='192.168.2.3', iprange_r='192.168.2.239', then the
               range of flooding is 192.168.2.3~192.168.2.238.
        ports: A `list` of possible ports.  E.g. list(range(8468, 8990));
               [8468, 8470].
    """
    if addrs is None:
        if not flooding:
            raise Exception('NoneType of addrs: flooding must be True in this case!')
        # flood every address in the given IP range
        print('In flooding mode:')
        addrs = to_flood(iprange_l, iprange_r, ports)
    log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs))
    cos = list(map(self.bootstrap_node, addrs))
    gathered = await asyncio.gather(*cos)
    nodes = [node for node in gathered if node is not None]
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
    return await spider.find()
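# Helper sketch (hedged): to_flood is called above but not shown. A plausible
# implementation enumerates every (ip, port) pair in the half-open range
# [iprange_l, iprange_r), which matches the docstring's
# 192.168.2.3~192.168.2.238 example; the real helper may differ.
import ipaddress

def to_flood(iprange_l, iprange_r, ports):
    lo = int(ipaddress.IPv4Address(iprange_l))
    hi = int(ipaddress.IPv4Address(iprange_r))  # exclusive upper bound
    return [(str(ipaddress.IPv4Address(ip)), port)
            for ip in range(lo, hi)
            for port in ports]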
async def set_digest(self, dkey: bytes, value: bytes, store_local=False):
    """
    Set the given Keccak digest key (bytes) to the given value in the network.
    """
    log.debug("Setting digest '%s' = '%s' (store_local=%s) over kademlia network",
              dkey, value, store_local)
    node = Node(dkey)
    # log.debug("Number of keys in storage %s", self.storage.size)

    if store_local:
        log.debug("Storing dkey=%s to local storage", dkey)
        self.storage[dkey] = value  # this always saves to local storage

    nearest = self.protocol.router.findNeighbors(node)
    if len(nearest) == 0:
        log.warning("There are no known neighbors to set key %s", dkey.hex())
        return False

    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
    nodes = await spider.find()
    for n in nodes:
        log.debug('Spider found node %s', n)
    # log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

    # if this node is close too, then store here as well
    biggest = max([n.distanceTo(node) for n in nodes])
    if self.node.distanceTo(node) < biggest:
        self.storage[dkey] = value

    for n in nodes:
        log.debug("Asking node %s to store key=%s", n, dkey)
    ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
    # return true only if at least one store call succeeded
    return any(await asyncio.gather(*ds))
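# Usage sketch (hedged): with store_local=True the value is written to this
# node's storage up front, regardless of XOR distance, which suits data the
# node itself publishes. The caller below is illustrative; key derivation is
# left to the caller since the source only says the key is a Keccak digest.
async def publish(server, dkey: bytes, value: bytes):
    return await server.set_digest(dkey, value, store_local=True)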