def test_distance_calculation(self):
    """The distance between two nodes must be the XOR of their 160-bit ids."""
    id_one = hashlib.sha1(str(random.getrandbits(255)).encode())
    id_two = hashlib.sha1(str(random.getrandbits(255)).encode())
    expected = int(id_one.hexdigest(), 16) ^ int(id_two.hexdigest(), 16)
    first = Node(id_one.digest())
    second = Node(id_two.digest())
    self.assertEqual(first.distance_to(second), expected)
def rpc_find_node(self, sender, nodeid, key):
    """
    Handle a FIND_NODE RPC: record the caller, then return the closest
    known neighbors of *key* (excluding the caller) as plain tuples.
    """
    log.info("finding neighbors of %i in local table", int(nodeid.hex(), 16))
    caller = Node(nodeid, sender[0], sender[1])
    self.welcome_if_new(caller)
    target = Node(key)
    found = self.router.find_neighbors(target, exclude=caller)
    # Serialize each neighbor to a tuple so it can travel over the wire.
    return [tuple(neighbor) for neighbor in found]
async def set_digest(self, dkey, value):
    """
    Set the given SHA1 digest key (bytes) to the given value in the
    network.

    Stores the value locally first, then crawls toward the key and asks
    every node found to store it.  Returns True when at least one remote
    store call succeeds, False when no neighbors are known.
    """
    self.storage[dkey] = value

    target = Node(dkey)
    neighbors = self.protocol.router.find_neighbors(target)
    if not neighbors:
        log.warning("There are no known neighbors to set dkey %s", dkey.hex())
        return False

    crawler = NodeSpiderCrawl(self.protocol, target, neighbors,
                              self.ksize, self.alpha)
    found = await crawler.find()
    log.info("setting '%s' on %s", dkey.hex(), list(map(str, found)))

    # Fire a store RPC at every node the crawl located, concurrently;
    # succeed as long as any one of them accepts the value.
    store_calls = [self.protocol.call_store(peer, dkey, value)
                   for peer in found]
    return any(await asyncio.gather(*store_calls))
def get_node_list(self):
    """
    Get the node list in the response.  If there's no value, this should
    be set.
    """
    raw = self.response[1]
    if not raw:
        return []
    return [Node(*nodeple) for nodeple in raw]
def rpc_find_value(self, sender, nodeid, key):
    """
    Handle a FIND_VALUE RPC: return the stored value when we have it,
    otherwise behave exactly like FIND_NODE for the same key.
    """
    origin = Node(nodeid, sender[0], sender[1])
    self.welcome_if_new(origin)
    stored = self.storage.get(key, None)
    if stored is not None:
        return {'value': stored}
    # Nothing stored locally -- fall back to returning closest nodes.
    return self.rpc_find_node(sender, nodeid, key)
def rpc_store(self, sender, nodeid, key, value):
    """
    Handle a STORE RPC: learn about the caller, persist the key/value
    pair locally, and acknowledge with True.
    """
    origin = Node(nodeid, sender[0], sender[1])
    self.welcome_if_new(origin)
    log.debug("got a store request from %s, storing '%s'='%s'",
              sender, key.hex(), value)
    self.storage[key] = value
    return True
def welcome_if_new(self, node):
    """
    Given a new node, send it all the keys/values it should be storing,
    then add it to the routing table.

    @param node: A new node that just joined (or that we just found out
    about).

    Process:
    For each key in storage, get k closest nodes.  If newnode is closer
    than the furthest in that list, and the node for this server
    is closer than the closest in that list, then store the key/value
    on the new node (per section 2.5 of the paper)
    """
    # Already in the routing table -- nothing to hand off.
    if not self.router.is_new_node(node):
        return
    log.info("never seen %s before, adding to router", node)
    # NOTE(review): assumes self.storage iterates as (key, value) pairs
    # (an IStorage-style container) -- a plain dict would yield bare keys.
    for key, value in self.storage:
        keynode = Node(digest(key))
        neighbors = self.router.find_neighbors(keynode)
        if neighbors:
            # Is the new node closer to the key than our current
            # furthest neighbor for it?
            last = neighbors[-1].distance_to(keynode)
            new_node_close = node.distance_to(keynode) < last
            # And are we ourselves closer than the current closest?
            first = neighbors[0].distance_to(keynode)
            this_closest = self.source_node.distance_to(keynode) < first
        # Short-circuit: when neighbors is empty the names bound above
        # are never evaluated, so this is safe.
        if not neighbors or (new_node_close and this_closest):
            # Fire-and-forget the replication; we don't await the result.
            asyncio.ensure_future(self.call_store(node, key, value), loop=self.loop)
    self.router.add_contact(node)
def mknode(node_id=None, ip_addy=None, port=None, intid=None):
    """
    Make a node.  Created a random id if not specified.
    """
    if intid is not None:
        # An explicit integer id wins: pack it big-endian into 4 bytes.
        node_id = pack('>l', intid)
    elif not node_id:
        # No id given at all -- fabricate a random 160-bit one.
        node_id = hashlib.sha1(str(random.getrandbits(255)).encode()).digest()
    return Node(node_id, ip_addy, port)
async def get_digest(self, dkey):
    """
    Get a given SHA1 digest key (bytes) if the network has it.

    Args:
        dkey (bytes): SHA1 digest of the key to look up.

    Returns:
        :class:`None` if not found, the value otherwise.
    """
    # Check local storage first; bind the result once instead of
    # performing the lookup twice.
    value = self.storage.get(dkey)
    if value is not None:
        return value
    node = Node(dkey)
    nearest = self.protocol.router.find_neighbors(node)
    if not nearest:
        log.warning("There are no known neighbors to get dkey %s", dkey.hex())
        return None
    spider = ValueSpiderCrawl(self.protocol, node, nearest,
                              self.ksize, self.alpha)
    return await spider.find()
def __init__(self, ksize=20, alpha=3, node_id=None, storage=None):
    """
    Create a server instance.  This will start listening on the given port.

    Args:
        ksize (int): The k parameter from the paper
        alpha (int): The alpha parameter from the paper
        node_id: The id for this node on the network.
        storage: An instance that implements
                 :interface:`~kademlia.storage.IStorage`
    """
    self.ksize = ksize
    self.alpha = alpha
    # Fall back to an in-memory store / random id when none was supplied.
    self.storage = storage or ForgetfulStorage()
    self.node = Node(node_id or digest(random.getrandbits(255)))
    # Networking and maintenance state is populated later by listen().
    self.transport = self.protocol = None
    self.refresh_loop = self.save_state_loop = None
    self.port = None
async def _refresh_table(self): """ Refresh buckets that haven't had any lookups in the last hour (per section 2.3 of the paper). """ results = [] for node_id in self.protocol.get_refresh_ids(): node = Node(node_id) nearest = self.protocol.router.find_neighbors(node, self.alpha) spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha) results.append(spider.find()) # do our crawling await asyncio.gather(*results) # now republish keys older than one hour for dkey, value in self.storage.iter_older_than(3600): await self.set_digest(dkey, value)
async def bootstrap_node(self, addr):
    """
    Ping the node at *addr*; on success wrap the responder's id and
    address in a Node, otherwise return None.
    """
    outcome = await self.protocol.ping(addr, self.node.id)
    if not outcome[0]:
        return None
    return Node(outcome[1], addr[0], addr[1])
def __init__(self, source_id, ksize=20):
    """Initialize local storage, our id, and a routing table keyed on it."""
    self.storage = {}
    self.source_id = source_id
    self.router = RoutingTable(self, ksize, Node(source_id))
def test_long_id(self):
    """A node's long_id must equal its raw 160-bit id read as a big int."""
    raw_id = hashlib.sha1(str(random.getrandbits(255)).encode()).digest()
    self.assertEqual(Node(raw_id).long_id, int(raw_id.hex(), 16))
def rpc_ping(self, sender, nodeid):
    """Handle a PING RPC: learn about the caller, reply with our own id."""
    caller = Node(nodeid, sender[0], sender[1])
    self.welcome_if_new(caller)
    return self.source_node.id