# Module-level imports these methods rely on (Python 2: urlopen and
# URLError come from urllib2; bencode and util are project-local modules).
import binascii
import socket
import struct
import urllib
from urllib2 import urlopen, URLError

import bencode
import util


def _scrape_http(self, retry_attempt=False):
    # Scrape endpoint on the tracker's announce port; the raw 20-byte
    # info_hash must be percent-encoded before use in a query string.
    tracker = self.serv_type + "://" + self.URL + ":" + str(self.port) + "/scrape"
    hashed = urllib.quote_plus(binascii.a2b_hex(self.info_hash))
    url = tracker + "?info_hash=" + hashed
    try:
        txt = urlopen(url, timeout=10).read()
    except URLError:
        if retry_attempt:
            return None
        print("Error: scrape of " + self.URL + " failed. Trying again...")
        return self._scrape_http(True)
    data = bencode.bdecode(txt)
    return (util.dict_find("complete", data),
            util.dict_find("downloaded", data),
            util.dict_find("incomplete", data))
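# Hedged usage sketch, not from the original source: the tracker host, port,
# and helper name below are made-up assumptions, and "files" is the key the
# HTTP scrape convention uses for per-torrent statistics. It shows the
# response shape that util.dict_find("complete"/"downloaded"/"incomplete")
# walks in _scrape_http above.
def _example_scrape(info_hash_hex):
    quoted = urllib.quote_plus(binascii.a2b_hex(info_hash_hex))
    raw = urlopen("http://tracker.example.org:6969/scrape?info_hash=" + quoted,
                  timeout=10).read()
    files = bencode.bdecode(raw)["files"]
    # One entry per requested hash; each value holds the three counters
    # (seeders, total completions, leechers).
    return files.values()[0]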
def _announce_http(self, num_want, retry_attempt=False):
    # Announce URL with the percent-encoded info_hash and the standard
    # request parameters; compact=1 asks for the packed binary peer list.
    tracker = self.serv_type + "://" + self.URL + ":" + str(self.port) + "/announce"
    hashed = urllib.quote_plus(binascii.a2b_hex(self.info_hash))
    url = (tracker + "?info_hash=" + hashed
           + "&peer_id=12345678987654321234"
           + "&port=" + str(self.port)
           + "&uploaded=0&downloaded=0&left=0&compact=1"
           + "&event=started&numwant=" + str(num_want))
    try:
        txt = urlopen(url, timeout=10).read()
    except URLError:
        if retry_attempt:
            print("Error: could not open URL " + url)
            return None
        print("Error: could not open URL " + url + ". Trying again...")
        return self._announce_http(num_want, True)
    data = bencode.bdecode(txt)
    if data.get("failure reason"):
        print("Error: HTTP server " + self.URL + " returned error '"
              + data["failure reason"] + "' on announce request.")
        return None
    peers = util.dict_find('peers', data)
    if not peers:
        return None
    # Compact format: each peer is 6 bytes, a 4-byte IPv4 address followed
    # by a 2-byte big-endian port. Walk whole 6-byte records only, rather
    # than assuming the tracker returned exactly num_want peers.
    peer_IPs = []
    index = 0
    while index + 6 <= len(peers):
        try:
            ip = socket.inet_ntoa(peers[index:index + 4])
            # u_port = struct.unpack(">H", peers[index + 4:index + 6])[0]
            peer_IPs.append(ip)
            index += 6
        except socket.error:
            return None
    return peer_IPs
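# Sketch of compact peer-list decoding under the same 6-bytes-per-peer
# assumption as _announce_http; unlike that method it also unpacks the
# peer port, as the commented-out struct call there suggests. The helper
# name is illustrative, not part of the original source.
def _decode_compact_peers(peers):
    decoded = []
    for i in range(0, len(peers) - len(peers) % 6, 6):
        ip = socket.inet_ntoa(peers[i:i + 4])
        port = struct.unpack(">H", peers[i + 4:i + 6])[0]
        decoded.append((ip, port))
    return decoded

# Example: _decode_compact_peers("\x7f\x00\x00\x01\x1a\xe1") returns
# [("127.0.0.1", 6881)].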