def main(self):
    ''' ipinfo checker main '''
    print(self.NAME + colorama.Fore.GREEN + "Starting ASN check...")
    ##################################
    self.lock = asyncio.Lock()
    loop = asyncio.get_event_loop()
    ignore_aiohttp_ssl_eror(loop)
    tasks = []
    ##############################
    # spawn self.TASKS concurrent ASN-checking workers
    for i in range(0, self.TASKS):
        tasks.append(loop.create_task(self.asnChecker()))
    ########################################################
    #self.event = asyncio.Event()
    #tasks.append(loop.create_task(tools.awaiter(self.time, self.event, self.NAME, self.proxies)))
    ###########################################################################
    try:
        loop_response = loop.run_until_complete(
            asyncio.gather(*tasks, return_exceptions=True))
    except KeyboardInterrupt:
        print("\n" + self.NAME + colorama.Fore.YELLOW +
              "Check cancelled! Exiting...")
        for i in tasks:
            i.cancel()
    except Exception as e:
        raise e
    else:
        tools.loopResponse(loop_response, "ipinfo")
    #######################################################################
    # dead proxies go to trashproxies/died.txt
    with open("trashproxies/died.txt", mode="a", encoding="UTF-8") as file:
        for i in self.died:
            file.write(i.normal + "\n")
    #########################################################################
    # proxies whose ASN is blacklisted go to trashproxies/badASN.txt
    with open("trashproxies/badASN.txt", mode="a", encoding="UTF-8") as file:
        for i in self.bad:
            file.write(i.normal + "\n")
    ##########################################################
    print(self.NAME + colorama.Fore.GREEN +
          f"Bad proxies in trashproxies/died.txt, {len(self.died)}")
    print(self.NAME + colorama.Fore.GREEN +
          f"Proxies with blacklisted ASN in trashproxies/badASN.txt, {len(self.bad)}")
    print(self.NAME + colorama.Fore.GREEN + "ASN check finished!")
    return self.green
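# --------------------------------------------------------------------------
# Illustrative sketch (not part of this project): the main() methods in this
# module share one pattern -- spawn self.TASKS worker coroutines, gather them
# with return_exceptions=True so one failed coroutine cannot abort the batch,
# and cancel everything on Ctrl+C. The names below (demo_worker,
# run_demo_batch, the queue) are placeholders, not identifiers from this
# codebase.
import asyncio


async def demo_worker(queue: "asyncio.Queue[str]", results: list) -> None:
    # drain the shared queue; each item stands in for one proxy to check
    while not queue.empty():
        item = queue.get_nowait()
        await asyncio.sleep(0)  # stand-in for the real network call
        results.append(item)


def run_demo_batch(items, tasks_count: int = 4) -> list:
    loop = asyncio.new_event_loop()
    queue: "asyncio.Queue[str]" = asyncio.Queue()
    for item in items:
        queue.put_nowait(item)
    results: list = []
    tasks = [loop.create_task(demo_worker(queue, results)) for _ in range(tasks_count)]
    try:
        loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    except KeyboardInterrupt:
        for t in tasks:
            t.cancel()
    finally:
        loop.close()
    return results
# --------------------------------------------------------------------------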
def main(self):
    ''' user's request checker main '''
    print(self.NAME + colorama.Fore.GREEN + "Starting user's request check...")
    ###################
    self.checkingData()
    self.lock = asyncio.Lock()
    loop = asyncio.get_event_loop()
    tasks = []
    ignore_aiohttp_ssl_eror(loop)
    #############################
    # spawn self.TASKS concurrent workers for the user-defined check
    for i in range(0, self.TASKS):
        tasks.append(loop.create_task(self.startingCheck()))
    #############
    try:
        loop_response = loop.run_until_complete(
            asyncio.gather(*tasks, return_exceptions=True))
    except KeyboardInterrupt:
        print("\n" + self.NAME + colorama.Fore.YELLOW +
              "Check cancelled! Exiting...")
        for i in tasks:
            i.cancel()
    except Exception as e:
        raise e
    else:
        tools.loopResponse(loop_response, "user's checker")
    ########################
    # dead proxies go to trashproxies/died.txt
    with open("trashproxies/died.txt", mode="a", encoding="UTF-8") as file:
        for i in self.died:
            file.write(i.normal + "\n")
    #########################
    # proxies that failed the user's check go to trashproxies/userBad.txt
    with open("trashproxies/userBad.txt", mode="a", encoding="UTF-8") as file:
        for i in self.bad:
            file.write(i.normal + "\n")
    ###########################
    print(self.NAME + colorama.Fore.GREEN +
          f"Bad proxies in trashproxies/died.txt, {len(self.died)}")
    print(self.NAME + colorama.Fore.GREEN +
          f"User-bad proxies in trashproxies/userBad.txt, {len(self.bad)}")
    print(self.NAME + colorama.Fore.GREEN + "User's check finished!")
    return self.green
def main(self):
    ''' Start function for the parser object '''
    print(self.NAME + colorama.Fore.GREEN + "Started parsing...")
    ##############################################
    self.inputLinks = copy.deepcopy(self.MAINURLS)
    self.lock = asyncio.Lock()
    loop = asyncio.get_event_loop()
    tasks = []
    ignore_aiohttp_ssl_eror(loop)
    #############################
    # three crawl passes over the collected page links
    for i in range(0, 3):
        ##############################
        # start a fresh batch of crawler tasks for this pass
        tasks.clear()
        for i in range(0, self.TASKS):
            tasks.append(loop.create_task(self.getWebPage()))
        ####
        try:
            loop_response = loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
        except (KeyboardInterrupt, RuntimeError, AttributeError):
            # KeyboardInterrupt is not a subclass of Exception, so it must be
            # caught explicitly here
            print("\n" + self.NAME + colorama.Fore.YELLOW + "Parsing stopped! Cancelling all tasks...")
            for i in tasks:
                i.cancel()
            break
        finally:
            # queue the freshly found links for the next pass, moving .zip
            # archive links into zipLinks before self.links is cleared
            self.inputLinks.clear()
            for i in self.links:
                if "zip" in i:
                    self.zipLinks.add(i)
                else:
                    self.inputLinks.add(i)
            self.links.clear()
    #########################################
    print(self.NAME + colorama.Fore.GREEN + "Analyzing found URLs...")
    self.inputLinks = copy.deepcopy(self.zipLinks)
    ##############################################
    # crawl the pages that contain .zip archive links
    tasks.clear()
    for i in range(0, self.TASKS):
        tasks.append(loop.create_task(self.getWebPage()))
    #########################################################
    try:
        loop_response = loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    except (KeyboardInterrupt, RuntimeError, AttributeError):
        print("\n" + self.NAME + colorama.Fore.YELLOW + "Parsing stopped! Cancelling all tasks...")
        for i in tasks:
            i.cancel()
    else:
        tools.loopResponse(loop_response, "parser archives")
    #############################################################
    # downloading archives
    self.inputLinks = copy.deepcopy(self.linksForDownload)
    print(self.NAME + colorama.Fore.GREEN + "Downloading .zip archives...")
    #############################################################
    tasks.clear()
    for i in range(0, self.TASKS):
        tasks.append(loop.create_task(self.DownloadArchive()))
    #############################################################
    try:
        loop_response = loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    except (KeyboardInterrupt, RuntimeError, AttributeError):
        print("\n" + self.NAME + colorama.Fore.YELLOW + "Parsing stopped! Cancelling all tasks...")
        for i in tasks:
            i.cancel()
    else:
        tools.loopResponse(loop_response, "download archives")
    ###################################################
    # unpack the downloaded archives; clean up the downloads dir on failure
    try:
        self.zipExtractor()
    except Exception as e:
        if "downloads" in os.getcwd():
            os.chdir("..")
        if os.access("downloads", os.F_OK):
            rmtree("downloads")
        tools.log(e, "open archives")
    #################################################################
    print(self.NAME + colorama.Fore.GREEN + "Finished parsing proxies.")
    print(self.NAME + f"Got {len(self.proxies)} proxies.")
    self.proxies = [*self.proxies]  # convert the collected set to a list
    #################################################################
    return self.proxies
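# --------------------------------------------------------------------------
# Illustrative sketch (not this project's zipExtractor): one way the archive
# extraction step used above can be written with the standard library. The
# function name, downloads_dir default, and return value are placeholders.
import os
import zipfile


def extract_downloaded_archives(downloads_dir: str = "downloads") -> list:
    """Extract every .zip in downloads_dir and return the extracted member paths."""
    extracted = []
    for name in os.listdir(downloads_dir):
        if not name.endswith(".zip"):
            continue
        archive_path = os.path.join(downloads_dir, name)
        with zipfile.ZipFile(archive_path) as archive:
            archive.extractall(downloads_dir)
            extracted.extend(
                os.path.join(downloads_dir, member) for member in archive.namelist()
            )
    return extracted
# --------------------------------------------------------------------------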