def search(self, what, cat='all'):
    """ Performs search """
    connection = https("thepiratebay.gd")

    # prepare query. 7 is filtering by seeders
    cat = cat.lower()
    query = "/".join(("/search", what, "0", "7", self.supported_categories[cat]))

    connection.request("GET", query)
    response = connection.getresponse()
    if response.status != 200:
        return

    list_searches = []
    parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
    parser.feed(response.read().decode('utf-8'))
    parser.close()

    parser.add_query = False
    for search_query in list_searches:
        connection.request("GET", search_query)
        response = connection.getresponse()
        parser.feed(response.read().decode('utf-8'))
        parser.close()

    connection.close()
    return
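# A minimal, hypothetical sketch of the first-pass role that a parser like
# MyHtmlParseWithBlackJack plays above: while add_query is True it collects the
# URLs of further result pages into the shared list, which the loop then fetches.
# The class name, attribute handling and the "/search/" href prefix below are
# assumptions for illustration only, not the plugin's actual parser.
from html.parser import HTMLParser

class PaginationCollector(HTMLParser):
    def __init__(self, list_searches, url):
        super().__init__()
        self.list_searches = list_searches  # shared with the calling search()
        self.url = url
        self.add_query = True               # caller disables this after page one

    def handle_starttag(self, tag, attrs):
        # record pagination links only on the first pass
        if tag != "a" or not self.add_query:
            return
        href = dict(attrs).get("href") or ""
        if href.startswith("/search/") and href not in self.list_searches:
            self.list_searches.append(href)  # queued for the follow-up requests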
def search(self, what, cat='all'):
    """ Performs search """
    connection = https("www.demonoid.pw")

    # prepare query
    cat = self.supported_categories[cat.lower()]
    query = "".join(("/files/?category=", cat,
                     "&subcategory=All&quality=All&seeded=2&external=2&query=",
                     what, "&to=1&uid=0&sort=S"))

    connection.request("GET", query)
    response = connection.getresponse()
    if response.status != 200:
        return

    data = response.read().decode("utf-8")

    add_res_list = re_compile("/files.*page=[0-9]+")
    torrent_list = re_compile("start torrent list -->(.*)<!-- end torrent", DOTALL)
    data = torrent_list.search(data).group(0)
    list_results = add_res_list.findall(data)

    parser = self.MyHtmlParseWithBlackJack(self.url)
    parser.feed(data)
    del data

    if list_results:
        for search_query in islice((add_res_list.search(result).group(0)
                                     for result in list_results[1].split(" | ")), 0, 5):
            connection.request("GET", search_query)
            response = connection.getresponse()
            parser.feed(torrent_list.search(response.read().decode('utf-8')).group(0))
            parser.close()

    connection.close()
    return
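# These search() methods appear to rely on module-level aliases along these
# lines; the actual import statements are not shown in this excerpt, so this is
# an assumption inferred from the names used (https, re_compile, DOTALL, islice):
from http.client import HTTPSConnection as https
from itertools import islice
from re import compile as re_compile, DOTALL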
def search(self, query, cat='all'):
    """ Performs search via this engine """
    # connect to tracker and get initial results
    # TODO: handle ssl problem?
    connection = https("oldpiratebay.org")
    query = "".join(("/search.php?q=", query))
    connection.request("GET", query)
    response = connection.getresponse()

    list_searches = []
    parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
    parser.feed(response.read().decode('utf-8'))
    parser.close()
    parser.first_look = False

    # continue if there are more results (no more than 10 pages)
    for next_query in list_searches:
        connection.request("GET", next_query)
        response = connection.getresponse()
        parser.feed(response.read().decode('utf-8'))
        parser.close()

    connection.close()
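# All of the search() methods above share the same low-level fetch pattern:
# open an HTTPS connection, issue a GET, then read and decode the body.
# A self-contained sketch of just that pattern, with placeholder arguments
# rather than anything taken from the plugins themselves:
from http.client import HTTPSConnection

def fetch_page(host, path):
    """Return the decoded body of https://<host><path>, or None on a non-200 status."""
    connection = HTTPSConnection(host)
    try:
        connection.request("GET", path)
        response = connection.getresponse()
        if response.status != 200:
            return None
        return response.read().decode("utf-8")
    finally:
        connection.close()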