def send_find_node(self, node):
    # Build a KRPC find_node query with a fresh transaction ID and a random target ID
    query = {
        "t": dht_id.gen_tid(),
        "y": "q",
        "q": "find_node",
        "a": {
            "id": self.crawler_nid,
            "target": dht_id.gen_random_nid()
        }
    }
    # bencode the message and send it to the node over UDP
    bquery = bencode(query)
    self.sock.sendto(bquery, (node.ip, node.port))
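The query depends on two helpers from the dht_id module that are not shown in this excerpt. A minimal sketch of what they could look like, following the BEP 5 conventions of 20-byte node IDs and a short opaque transaction ID; the bodies here are assumptions, not the original module:

import os

def gen_random_nid():
    # 20 random bytes, i.e. a 160-bit ID in the same space as SHA-1 infohashes (assumed)
    return os.urandom(20)

def gen_tid(length=2):
    # short opaque transaction ID that the queried node echoes back in its reply (length assumed)
    return os.urandom(length)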
def __init__(self, ip, port, max_node_size):
    Thread.__init__(self)
    # self.setDaemon(True)
    self.ip = ip
    self.port = port
    # self.nodes = Queue(maxsize=max_node_size)
    self.nodes = dht_node.Nodes(max_node_qsize=max_node_size)  # bounded pool of known DHT nodes
    self.crawler_nid = dht_id.gen_random_nid()                 # this crawler's own 160-bit node ID
    self.is_crawling = False
    # background thread that keeps (re)joining the DHT
    self.join_dht_thread = Thread(target=self.join_dht)
    self.join_dht_thread.setDaemon(True)
    # UDP socket used for all KRPC traffic
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    self.sock.bind((self.ip, self.port))
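For orientation, starting the crawler might look roughly like the following. The class name Crawler and the behaviour of run() and join_dht() are assumptions about code not shown in this excerpt:

# Hypothetical usage; Crawler, run() and join_dht() are assumed names/behaviour.
crawler = Crawler("0.0.0.0", 6881, max_node_size=5000)
crawler.start()                  # Thread.run(): presumably receives and dispatches incoming KRPC messages
crawler.join_dht_thread.start()  # presumably bootstraps into the DHT and keeps sending find_node queries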
def process_get_peers_request(crawler, msg, address):
    # Add the querying node to the node pool
    crawler.nodes.add(dht_node.Node(nid=msg[b'a'][b'id'], ip=address[0], port=address[1]))
    # The 8 closest neighbour nodes we know of
    close_nodes = crawler.nodes.get_close_nodes()
    # Send the response
    response = {
        't': msg[b't'],
        'y': 'r',
        'r': {
            'id': crawler.crawler_nid,
            'token': dht_id.gen_random_nid(),
            'nodes': dht_node.encode_nodes(close_nodes)
        }
    }
    send_response(crawler.sock, address, response)
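encode_nodes and send_response are likewise not shown in this excerpt. A sketch of how they could be implemented, assuming the standard BEP 5 "compact node info" layout (20-byte node ID, 4-byte IPv4 address, 2-byte big-endian port) and the same bencode function already used in send_find_node; the Node attributes nid/ip/port come from the code above, the function bodies are assumptions:

import socket
import struct

def encode_nodes(nodes):
    # Pack nodes into BEP 5 compact node info: 26 bytes per node
    # (20-byte node ID + 4-byte IPv4 address + 2-byte big-endian port).
    packed = b''
    for node in nodes:
        packed += node.nid + socket.inet_aton(node.ip) + struct.pack('!H', node.port)
    return packed

def send_response(sock, address, response):
    # bencode the KRPC reply dict (same bencode as above) and send it back over the UDP socket
    sock.sendto(bencode(response), address)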