def search(self, what, cat='all'):
    """Search thepiratebay.gd for *what* in category *cat*.

    Fetches the first result page (sorted by seeders), lets the parser
    collect links to any additional result pages, then walks those pages
    with the same parser.  Results are emitted by the parser as a side
    effect; nothing is returned.

    :param what: search string (assumed already URL-safe -- TODO confirm
        the caller quotes it)
    :param cat: category key into ``self.supported_categories``
    """
    connection = https("thepiratebay.gd")
    # Prepare query. "7" selects filtering/sorting by seeders in the
    # site's URL scheme.
    cat = cat.lower()
    query = "/".join(("/search", what, "0", "7", self.supported_categories[cat]))
    connection.request("GET", query)
    response = connection.getresponse()
    if response.status != 200:
        # fix: close the connection instead of leaking it on HTTP errors
        connection.close()
        return
    list_searches = []
    parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
    parser.feed(response.read().decode('utf-8'))
    parser.close()
    # The first pass collected additional result-page links into
    # list_searches; stop the parser from collecting more while we
    # fetch and parse those pages.
    parser.add_query = False
    for search_query in list_searches:
        connection.request("GET", search_query)
        response = connection.getresponse()
        parser.feed(response.read().decode('utf-8'))
        parser.close()
    connection.close()
    return
def main(args):
    """Shrink every picture listed in *args* via the api.tinypng.com API.

    Optimized copies are written to a ``tiny_png_optimized`` directory
    created next to each source file.  Requires a TinyPNG API key in
    ``key`` below.

    :param args: iterable of paths/patterns consumed by ``gen_file_list``
    """
    if not args:
        return
    key = ""  # enter API key
    # HTTP basic auth: "api:<key>" base64-encoded, per the TinyPNG API.
    auth = b64encode(bytes("api:" + key, "ascii")).decode("ascii")
    headers = {"Authorization": " ".join(("Basic", auth))}
    connection = https("api.tinypng.com")
    for picture_file in gen_file_list(args):
        print(" ".join((">>>Shrink pic:", picture_file)))
        result_dir = path_join(dirname(picture_file), "tiny_png_optimized")
        if not isdir(result_dir):
            mkdir(result_dir)
        output = path_join(result_dir, basename(picture_file))
        # fix: close the source file instead of leaking the handle
        with open(picture_file, "rb") as source:
            payload = source.read()
        connection.request("POST", "https://api.tinypng.com/shrink", payload, headers)
        response = connection.getresponse()
        if response.status == 201:
            # Compression was successful, retrieve output from Location header.
            response.read()  # drain the body so the connection can be reused
            connection.request("GET", response.getheader("Location"))
            result = connection.getresponse()
            # fix: close the output file instead of leaking the handle
            with open(output, "wb") as target:
                target.write(result.read())
            print(" ".join(("Succesfuly shrinked. Result pic:", output)))
        else:
            # Something went wrong! You can parse the JSON body for details.
            print(" ".join(("Failed to compress:", picture_file, "Status:", str(response.status))))
            print(" ".join(("Reason:", response.reason)))
            response.read()
    # fix: release the HTTPS connection when done
    connection.close()
def main(args):
    """Compress the pictures named by *args* through the TinyPNG web API.

    For every input file a shrunken copy is placed in a sibling
    ``tiny_png_optimized`` directory.  The API key must be filled in
    below before use.

    :param args: paths/patterns expanded by ``gen_file_list``
    """
    if not args:
        return
    key = ""  # enter API key
    # Basic-auth header built from "api:<key>" as the TinyPNG API expects.
    auth = b64encode(bytes("api:" + key, "ascii")).decode("ascii")
    headers = {"Authorization": " ".join(("Basic", auth))}
    connection = https("api.tinypng.com")
    for picture_file in gen_file_list(args):
        print(" ".join((">>>Shrink pic:", picture_file)))
        result_dir = path_join(dirname(picture_file), "tiny_png_optimized")
        if not isdir(result_dir):
            mkdir(result_dir)
        output = path_join(result_dir, basename(picture_file))
        # fix: read the source through a context manager; the original
        # leaked the file handle.
        with open(picture_file, "rb") as infile:
            body = infile.read()
        connection.request("POST", "https://api.tinypng.com/shrink", body, headers)
        response = connection.getresponse()
        if response.status == 201:
            # Compression was successful, retrieve output from Location header.
            response.read()  # drain before issuing the next request
            connection.request("GET", response.getheader("Location"))
            result = connection.getresponse()
            # fix: write the result through a context manager; the
            # original leaked the file handle.
            with open(output, "wb") as outfile:
                outfile.write(result.read())
            print(" ".join(("Succesfuly shrinked. Result pic:", output)))
        else:
            # Something went wrong! You can parse the JSON body for details.
            print(" ".join(("Failed to compress:", picture_file, "Status:", str(response.status))))
            print(" ".join(("Reason:", response.reason)))
            response.read()
    # fix: the original never closed the connection
    connection.close()
def search(self, what, cat='all'):
    """Search www.demonoid.pw for *what* in category *cat*.

    Parses the first result page, then fetches up to five additional
    result pages found on it.  Results are emitted by the parser as a
    side effect; nothing is returned.

    :param what: search string (assumed already URL-safe -- TODO confirm)
    :param cat: category key into ``self.supported_categories``
    """
    connection = https("www.demonoid.pw")
    # Prepare query; seeded=2 / external=2 are the site's filter flags.
    cat = self.supported_categories[cat.lower()]
    query = "".join(("/files/?category=", cat,
                     "&subcategory=All&quality=All&seeded=2&external=2&query=",
                     what, "&to=1&uid=0&sort=S"))
    connection.request("GET", query)
    response = connection.getresponse()
    if response.status != 200:
        # fix: close the connection instead of leaking it on HTTP errors
        connection.close()
        return
    data = response.read().decode("utf-8")
    add_res_list = re_compile("/files.*page=[0-9]+")
    torrent_list = re_compile("start torrent list -->(.*)<!-- end torrent", DOTALL)
    # Narrow the page down to the torrent-list section before parsing.
    data = torrent_list.search(data).group(0)
    list_results = add_res_list.findall(data)
    parser = self.MyHtmlParseWithBlackJack(self.url)
    parser.feed(data)
    del data  # the page can be large; free it before fetching more
    if list_results:
        # NOTE(review): indexing list_results[1] assumes at least two
        # pagination matches and that the second holds the " | "-joined
        # page links -- verify against the site's markup.
        for search_query in islice((add_res_list.search(result).group(0)
                                    for result in list_results[1].split(" | ")), 0, 5):
            connection.request("GET", search_query)
            response = connection.getresponse()
            parser.feed(torrent_list.search(response.read().decode('utf-8')).group(0))
            parser.close()
    connection.close()
    return
def tox_vim_updater():
    """Check tuxproject.de for a newer Vim build and download the x64 archive.

    Compares the build date published on the project page against the
    date stored by ``read_config()``; when a newer build exists, downloads
    ``vim-x64.7z`` into the current directory and records the new date in
    ``CONFIG_FILE``.
    """
    old_date = tuple(int(number) for number in read_config())
    connection = https("tuxproject.de")
    connection.request("GET", "/projects/vim/")
    response = connection.getresponse()
    if response.status != 200:
        print("Failed to connect. Reason:", response.reason)
        connection.close()  # fix: do not leak the connection on failure
        return
    data = response.read().decode('utf-8')
    check_date = re_compile("[0-9]{4,}(-[0-9]{2,}){2,}")
    # fix: escape the dots -- an unescaped "." matches any character, so
    # the original pattern could match non-version text.
    check_version = re_compile(r"[0-9]+\.[0-9]+\.[0-9]+")
    result_date = check_date.search(data)
    result_date = result_date.group(0)
    date = tuple(int(number) for number in result_date.split("-"))
    if not date > old_date:
        print("Vim is up-to-date")
        connection.close()  # fix: release the connection before returning
        return
    result_version = check_version.search(data)
    version = result_version.group(0)
    print("New build is found:")
    print("Version:", version)
    print("Build date:", result_date)
    # 64bit build
    connection.request("GET", "/projects/vim/complete-x64.7z")
    response = connection.getresponse()
    if response.status != 200:
        print("Failed to connect. Reason:", response.reason)
        connection.close()
        return
    with open("vim-x64.7z", "wb") as vim_file:
        vim_file.write(response.read())
    connection.close()
    # fix: update the config only after the download succeeded; the
    # original wrote the new date first, so a failed download would be
    # silently skipped on the next run.
    with open(CONFIG_FILE, "w") as config:
        config.write(result_date)
    print("Succesfully downloaded vim-x64.7z")
def search(self, what, cat='all'):
    """Run a demonoid.pw search for *what* restricted to category *cat*.

    The first result page is parsed immediately; at most five follow-up
    result pages linked from it are then fetched and parsed.  All output
    happens through the parser; the function returns nothing.

    :param what: query string (presumably pre-quoted -- TODO confirm)
    :param cat: key into ``self.supported_categories``
    """
    connection = https("www.demonoid.pw")
    # Prepare query; the seeded/external flags are the site's filters.
    cat = self.supported_categories[cat.lower()]
    query = "".join(
        ("/files/?category=", cat,
         "&subcategory=All&quality=All&seeded=2&external=2&query=",
         what, "&to=1&uid=0&sort=S"))
    connection.request("GET", query)
    response = connection.getresponse()
    if response.status != 200:
        # fix: the original leaked the connection on this early return
        connection.close()
        return
    data = response.read().decode("utf-8")
    add_res_list = re_compile("/files.*page=[0-9]+")
    torrent_list = re_compile("start torrent list -->(.*)<!-- end torrent", DOTALL)
    # Keep only the torrent-list section of the page.
    data = torrent_list.search(data).group(0)
    list_results = add_res_list.findall(data)
    parser = self.MyHtmlParseWithBlackJack(self.url)
    parser.feed(data)
    del data  # release the large page body before further requests
    if list_results:
        # NOTE(review): list_results[1] presumes a second pagination
        # match containing " | "-separated page links -- verify.
        extra_pages = (add_res_list.search(result).group(0)
                       for result in list_results[1].split(" | "))
        for search_query in islice(extra_pages, 0, 5):
            connection.request("GET", search_query)
            response = connection.getresponse()
            parser.feed(
                torrent_list.search(
                    response.read().decode('utf-8')).group(0))
            parser.close()
    connection.close()
    return