def get_fresh_proxy():
    """Find 10 working HTTP/HTTPS proxies and hand them to ``save``."""
    proxy_queue = asyncio.Queue()
    finder = Broker(proxy_queue)
    jobs = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=10),
        save(proxy_queue),
    )
    asyncio.get_event_loop().run_until_complete(jobs)
def main():
    """Find up to 10 anonymous/high-anonymity proxies from US/DE/FR and use
    them (via a ProxyPool) to fetch a set of httpbin test URLs.

    Fix: the original passed ``loop=loop`` to ``asyncio.Queue``; that
    parameter was deprecated in Python 3.8 and removed in 3.10 — the queue
    binds to the running loop automatically.  The ``loop`` argument to
    ``Broker`` is proxybroker's own parameter and is kept.
    """
    loop = asyncio.get_event_loop()
    proxies = asyncio.Queue()
    # Judges/providers narrow where proxybroker checks and harvests proxies.
    judges = ['http://httpbin.org/get?show_env',
              'https://httpbin.org/get?show_env']
    providers = ['http://www.proxylists.net/', 'http://fineproxy.org/eng/']
    broker = Broker(
        proxies, timeout=8, max_conn=200, max_tries=3, verify_ssl=False,
        judges=judges, providers=providers, loop=loop)
    # Only anonymous/high-anonymity HTTP, plus any HTTPS.
    types = [('HTTP', ('Anonymous', 'High')), 'HTTPS']
    countries = ['US', 'DE', 'FR']
    urls = ['http://httpbin.org/get', 'https://httpbin.org/get',
            'http://httpbin.org/redirect/1', 'http://httpbin.org/status/404']
    proxy_pool = ProxyPool(proxies)
    tasks = asyncio.gather(
        broker.find(types=types, countries=countries, post=False,
                    strict=True, limit=10),
        get_pages(urls, proxy_pool, loop=loop))
    loop.run_until_complete(tasks)
    broker.show_stats(verbose=True)
def main():
    """Collect HTTP/HTTPS proxies (no count limit) and pass them to ``save``."""
    print("Getting proxies")
    queue = asyncio.Queue()
    finder = Broker(queue, timeout=2, max_tries=2, grab_timeout=3600)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS']),
        save(queue),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_proxies(proxies):
    """Find 10 HTTPS proxies and drain them into *proxies* via ``read_queue``."""
    found = asyncio.Queue()
    finder = Broker(found)
    work = asyncio.gather(
        finder.find(types=['HTTPS'], limit=10),
        read_queue(found, proxies),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_proxies(limit: int = 10):
    """Collect Russian proxies via proxybroker and persist them to PROXIES.

    :param limit: maximum number of proxies to find.
    :return: list of found proxies read back from the PROXIES file
             (split on newlines, so the last element may be empty).
    """
    loop = asyncio.get_event_loop()
    proxies = asyncio.Queue()
    broker = Broker(proxies, timeout=12, max_conn=200, max_tries=2,
                    verify_ssl=False, loop=loop)
    tasks = asyncio.gather(
        broker.grab(countries=['RU'], limit=limit),
        save_proxies(proxies, filename=PROXIES))
    # Fix: the original called asyncio.get_event_loop() a second time here;
    # the loop obtained above is reused instead.
    loop.run_until_complete(tasks)
    # Read the collected proxies back from the file just written.
    with open(PROXIES, 'r') as prx_row:
        proxies_list_get = prx_row.read().split('\n')
    l_message(gfn(), f'proxies_list_get {str(proxies_list_get)}',
              color=Nm.BColors.OKBLUE)
    return proxies_list_get
def get_proxies(self):
    # Collect proxies via proxybroker and feed them to
    # self.append_proxies(); retries with linear backoff (5s * retries)
    # when the event loop raises RuntimeError.
    self.consults += 1
    if self.consults < 10:
        try:
            logger.info("ProxyGetter: ---> Starting to get proxies")
            proxies = asyncio.Queue()
            broker = Broker(proxies)
            tasks = asyncio.gather(
                broker.find(types=self.types, limit=self.limit,
                            countries=self.countries_list),
                self.append_proxies(proxies))
            loop = asyncio.get_event_loop()
            loop.run_until_complete(tasks)
            # Successful run resets the backoff counter.
            self.retries = 0
        except RuntimeError:
            self.retries += 1
            logger.info(
                "ProxyGetter: ---> Getproxy fail, waiting {} to the next try"
                .format(5 * self.retries))
            sleep(5 * self.retries)
            self.get_proxies()
    else:
        # NOTE(review): once self.consults reaches 10 this branch recurses
        # unconditionally (sleeping 5s each call) while consults keeps
        # growing, so the call never terminates and eventually hits the
        # recursion limit — confirm where self.consults is meant to be
        # reset.
        sleep(5)
        self.get_proxies()
def getProxies(type, nr):
    """Find *nr* proxies of the given *type* and save them to proxies.txt."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=[type], limit=nr),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
async def proxies(ctx, amount: int):
    """Scrape up to 50 public HTTP/HTTPS proxies and post them as an embed."""
    em = discord.Embed(
        title="Auroris Proxy Scraper",
        description=
        "Note - Proxies are scraped from public sources, so all may not be secure or fully functional.",
        color=0x00a8ff)
    proxy_queue = asyncio.Queue()
    broker = Broker(proxy_queue)
    # Requests above 50 are capped at 50 (same as the original's two
    # explicit branches).
    await asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS'], limit=min(amount, 50)),
        show(proxy_queue))
    # Strip proxybroker's "<Proxy ...] host:port>" wrapper down to host:port.
    proxies_str = "".join(
        str(entry).split(']')[1][:-1] + "\n" for entry in bot.proxy_array)
    em.add_field(name="Proxies", value=proxies_str)
    em.set_footer(text=str(json_file["bot_embed_footer_text"]),
                  icon_url=str(json_file["bot_embed_logo"]))
    await ctx.send(embed=em)
    bot.proxy_array = []
def main():
    """Find `number` SOCKS4 proxies and save them to `filename` (globals)."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['SOCKS4'], limit=number),
        save(queue, filename=filename),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_proxies(timeout=20, broker_timeout=7, max_conn=150, max_tries=3,
                limit=40):
    """Refresh the global proxy list with up to *limit* SOCKS5 proxies.

    :param timeout: overall wall-clock budget for the whole search (seconds).
    :param broker_timeout: per-request timeout handed to Broker.
    :param max_conn: maximum concurrent connections for Broker.
    :param max_tries: proxy check attempts per candidate.
    :param limit: maximum number of proxies to collect.
    """
    print('Loading proxy list')
    # Fix: the original's finally block referenced `broker` and `tasks`
    # unconditionally; if the failure happened before they were assigned
    # (e.g. inside setup_proxy), the finally itself raised NameError.
    broker = None
    tasks = None
    try:
        proxy_list.clear()
        setup_proxy(reset=True)
        proxies = asyncio.Queue()
        broker = Broker(proxies, timeout=broker_timeout, max_conn=max_conn,
                        max_tries=max_tries)
        tasks = asyncio.gather(
            broker.find(types=['SOCKS5'], limit=limit),
            save_proxy(proxies))
        loop = asyncio.get_event_loop()
        # Bound the whole gather by the outer `timeout` budget.
        loop.run_until_complete(asyncio.wait_for(tasks, timeout))
        print('Loaded proxies:', colored(len(proxy_list), 'cyan'))
    except Exception as e:
        print(colored('Error while loading proxies.', 'red'), e)
        time.sleep(5)
    finally:
        if broker is not None:
            broker.stop()
        if tasks is not None:
            tasks.cancel()
def get_proxy_ips(self, limit=1):
    """Gather up to *limit* HTTP/HTTPS proxies via ProxyBroker.

    https://proxybroker.readthedocs.io/en/latest/examples.html#
    NEED TO COMMENT OUT THE REMOVAL OF IP_CHECKERS IN LINE 90 OF
    resolver.py within the ProxyBroker Code! Otherwise, it will
    fail after doing the refresh.

    :param limit: maximum number of proxies to collect.
    :return: list populated by ``self.show`` (empty on failure).
    """
    # Fix: removed unused locals from the original (host/port, a `types`
    # list and a `codes` list) — none of them were referenced; the actual
    # find() call hard-codes types=['HTTP', 'HTTPS'].
    proxies = asyncio.Queue()
    broker = Broker(proxies, max_tries=2)
    new_list = []
    try:
        logging.info("Gathering proxies using ProxyBroker API")
        tasks = asyncio.gather(
            broker.find(types=['HTTP', 'HTTPS'], limit=limit),
            self.show(proxies, new_list))
        logging.debug("Proxy Gathering Finished.")
        loop = asyncio.get_event_loop()
        logging.debug("Got Event Loop.")
        loop.run_until_complete(tasks)
        logging.info("Ran until complete.")
        broker.stop()
        logging.info("Broker Stopped Successfully.")
    except Exception as e:
        logging.error(
            "Error encountered when collecting Proxies in get_proxy_ips()."
        )
        logging.error(e, exc_info=True)
    return new_list
def proxyFINDER():
    """Interactively prompt for type/limit/timeout and print found proxies.

    Fix: the original passed ``timeout=timeoutproxy`` to ``Broker.find()``,
    which does not accept a ``timeout`` parameter (it belongs to the
    ``Broker`` constructor) and raised TypeError at runtime.
    """
    os.system("clear")
    print("""
███████╗██╗███╗   ██╗██████╗ ███████╗██████╗
██╔════╝██║████╗  ██║██╔══██╗██╔════╝██╔══██╗
█████╗  ██║██╔██╗ ██║██║  ██║█████╗  ██████╔╝
██╔══╝  ██║██║╚██╗██║██║  ██║██╔══╝  ██╔══██╗
██║     ██║██║ ╚████║██████╔╝███████╗██║  ██║
╚═╝     ╚═╝╚═╝  ╚═══╝╚═════╝ ╚══════╝╚═╝  ╚═╝
""")
    limitproxy = int(input("limit: "))
    typeproxy = input("types[HTTP(s),SOCKS4/5]: ")
    timeoutproxy = int(input("timeout[SECONDS]: "))
    print("-------------------------------")

    async def show(proxies):
        # Drain the queue until the broker signals completion with None.
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            print("New Proxy: %s" % proxy)

    proxies = asyncio.Queue()
    broker = Broker(proxies, timeout=timeoutproxy)
    tasks = asyncio.gather(
        broker.find(types=[typeproxy], limit=limitproxy),
        show(proxies))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)
    print("-------------------------------")
def main():
    """Grab (without checking) 10 US/GB proxies and save them to proxies.txt."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.grab(countries=['US', 'GB'], limit=10),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_one_random_proxy(types='HTTP'):
    """
    Find one new, working proxy from any country.
    :author: Sebastian
    :param types: The type of proxy to search for as a list of strings. Defaults to HTTP.
    If only one type should be specified a string like "HTTPS" will also work.
    Other possibilities are HTTPS, SOCKS4, SOCKS5. E.g. types=['HTTP, HTTPS']
    :return:The newly found proxys location (two-letter-iso country code) as well as
    the proxy (in <Proxy IP>:<Port> notation).
    """
    logger("Fetching one random proxy")
    # Fix: isinstance() instead of `type(...) is not list` (idiomatic and
    # subclass-safe).
    if not isinstance(types, list):
        types = [types]
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        logger("----New event loop")
        loop = asyncio.new_event_loop()
    # Fix: asyncio.Queue(loop=...) — the loop argument was deprecated in
    # Python 3.8 and removed in 3.10; the queue binds to the running loop.
    # Broker's own loop= parameter is kept.
    proxies = asyncio.Queue()
    broker = Broker(proxies, loop=loop)
    loop.run_until_complete(broker.find(limit=1, types=types))
    while True:
        proxy = proxies.get_nowait()
        if proxy is None:
            break
        fetched_proxy = "{}:{}".format(proxy.host, str(proxy.port))
        country = proxy.geo["code"]
        logger("Proxy from {} is: {}".format(country, fetched_proxy))
        _add_to_proxy_list(country, fetched_proxy)
        return country, fetched_proxy
    return None, None
def get_proxy():
    """Find 100 HTTPS proxies, collect them via get_stuff, and write to file."""
    queue = asyncio.Queue()  # queue shared between finder and consumer
    finder = Broker(queue, timeout=8)
    combined = asyncio.gather(
        finder.find(types=['HTTPS'], limit=100),
        get_stuff(queue),
    )
    loop = asyncio.get_event_loop()
    # gather() returns one result per task; the second is get_stuff's list.
    _, proxy_list = loop.run_until_complete(combined)
    write_to_file(proxy_list)
def main():
    """Find 10 HTTP/HTTPS proxies and save them to proxies.txt."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=10),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
def getProxies(self, loop, guiStatusElem):
    """Run a proxy search on *loop* and return the list produced by
    ``self.fetchProxies`` (the second gather result)."""
    # Bind the supplied loop to this thread before any asyncio work.
    asyncio.set_event_loop(loop)
    self.proxies = asyncio.Queue()
    finder = Broker(
        self.proxies,
        timeout=8,
        max_conn=200,
        max_tries=3,
        verify_ssl=True,
        judges=self.judges
    )
    combined = asyncio.gather(
        finder.find(
            types=self.types,
            countries=self.countries,
            strict=True,
            limit=self.proxyCount
        ),
        self.fetchProxies(guiStatusElem)
    )
    results = loop.run_until_complete(combined)
    # Index 1 is fetchProxies' return value; index 0 is find()'s.
    return results[1]
def get_proxy_list():
    """Collect up to 100 anonymous/high-anonymity HTTP(S) proxies and write
    them, one ``http://host:port`` per line, to proxy_list.txt under
    BASE_DAG_DIR.
    """
    import asyncio
    import datetime
    import os
    from proxybroker import Broker
    from util.constants import BASE_DAG_DIR

    async def get_list(proxies, proxy_list):
        # Drain the queue until the broker signals completion (None) or
        # roughly an hour has elapsed.
        start_time = datetime.datetime.now()
        while True:
            proxy = await proxies.get()
            if (datetime.datetime.now() - start_time).seconds / 3600 >= 1:
                print("!!!", "Timeout!", datetime.datetime.now())
                break
            if proxy is None:
                break
            # Fix: the original appended a trailing "\n" here AND the
            # writer below added another, producing blank lines between
            # entries in proxy_list.txt.
            proxy_list.append(f"http://{proxy.host}:{proxy.port}")

    proxies = asyncio.Queue()
    broker = Broker(proxies)
    proxy_list = []
    tasks = asyncio.gather(
        broker.find(types=[('HTTP', ('Anonymous', 'High')),
                           ('HTTPS', ('Anonymous', 'High'))], limit=100),
        get_list(proxies, proxy_list))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(tasks)
    with open(os.path.join(BASE_DAG_DIR, "proxy_list.txt"), "w") as f:
        for proxy in proxy_list:
            f.write(f"{proxy}\n")
def main():
    """Find 100 HTTP/HTTPS proxies and save them to proxies.txt."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=100),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
def gather_proxies():
    """Find 100 HTTP proxies, log each to temp.csv (tab-separated
    country-code / host:port), and return them as a list of
    ``[country_code, "host:port"]`` pairs.

    Fix: the original passed ``loop=loop`` to ``asyncio.Queue``; that
    parameter was deprecated in Python 3.8 and removed in 3.10.  Broker's
    own ``loop=`` parameter is kept.
    """
    proxy_list = list()
    types = ['HTTP']
    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        logger("----New event loop")
        loop = asyncio.new_event_loop()
    proxies = asyncio.Queue()
    broker = Broker(proxies, loop=loop)
    loop.run_until_complete(broker.find(limit=100, types=types))
    # Truncate (or create) the temp file before appending results.
    with open("{}/temp.csv".format(static_path), "w"):
        pass  # if file is present overwrite it
    while True:
        proxy = proxies.get_nowait()
        if proxy is None:
            break
        logger(str(proxy))
        with open("{}/temp.csv".format(static_path), "a") as temp:
            temp.write("{}\t{}\n".format(
                proxy.geo["code"],
                "{}:{}".format(proxy.host, str(proxy.port))))
        proxy_list.append(
            [proxy.geo["code"], "{}:{}".format(proxy.host, str(proxy.port))])
    return proxy_list
def generate_proxies():
    """Find 200 HTTP/HTTPS proxies and save them to proxies_file_path."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=200),
        save(queue, filename=proxies_file_path),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_one_specific_proxy(country, types='HTTP'):
    """
    Find one new, working proxy from the specified country.
    Run time of this method depends heavily on the country specified as for some countries
    it is hard to find proxies (e.g. Myanmar).
    :author: Sebastian
    :param country: Two-letter ISO formatted country code. If a lookup is needed before calling
    this method, please consult /static/country_codes.csv.
    :param types: The type of proxy to search for as a list of strings. Defaults to HTTP.
    If only one type should be specified a string like "HTTPS" will also work.
    Other possibilities are HTTPS, SOCKS4, SOCKS5. E.g. types=['HTTP, HTTPS']
    :return: A string containing the newly found proxy from the specified country
    in <Proxy IP>:<Port> notation.
    :raises RuntimeError if proxybroker has problems with its loop.
    Catch it and start new event_loop.
    """
    logger("Fetching one proxy from: {}".format(country))
    # Fix: isinstance() instead of `type(...) is not list`.
    if not isinstance(types, list):
        types = [types]
    loop = asyncio.get_event_loop()
    # Fix: asyncio.Queue(loop=...) — the loop argument was deprecated in
    # Python 3.8 and removed in 3.10.  Broker's own loop= parameter is kept.
    proxies = asyncio.Queue()
    broker = Broker(proxies, loop=loop)
    loop.run_until_complete(
        broker.find(limit=1, countries=[country], types=types))
    while True:
        proxy = proxies.get_nowait()
        if proxy is None:
            break
        fetched_proxy = "{}:{}".format(proxy.host, str(proxy.port))
        logger("Proxy from {} is: {}".format(country, fetched_proxy))
        _add_to_proxy_list(country, fetched_proxy)
        return fetched_proxy
    return None
def get_proxy():
    """Find 15 HTTP/HTTPS proxies and return one picked at random from the
    list produced by ``show``."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    combined = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=15),
        show(queue),
    )
    results = asyncio.get_event_loop().run_until_complete(combined)
    # The last gather result is show()'s return value (the proxy list).
    return choice(results[-1])
def fill_proxy(country_code):
    """Find one HTTP/HTTPS/CONNECT:80 proxy from *country_code* and hand the
    queue to ``update_proxy``.

    Fixes:
    - The original created the ``asyncio.gather`` future but never ran the
      event loop, so the search never actually executed; it is now driven
      to completion with ``run_until_complete``.
    - ``asyncio.Queue(loop=...)`` — the loop argument was deprecated in
      Python 3.8 and removed in 3.10.  Broker's own loop= parameter is kept.
    """
    loop = asyncio.get_event_loop()
    proxies = asyncio.Queue()
    broker = Broker(proxies, loop=loop)
    tasks = asyncio.gather(
        broker.find(types=['HTTP', 'HTTPS', 'CONNECT:80'],
                    countries=[country_code],
                    limit=1),
        update_proxy(proxies))
    loop.run_until_complete(tasks)
async def fetch_proxies(numProxies):
    """Find up to *numProxies* high-anonymity HTTP proxies and return them
    as a list (populated by ``show``)."""
    # (The original also called .clear() on this freshly created list —
    # a no-op that is omitted here.)
    found = []
    queue = asyncio.Queue()
    finder = Broker(queue=queue, verify_ssl=True)
    await asyncio.gather(
        finder.find(types=[('HTTP', 'High')], limit=numProxies, strict=True),
        show(queue, found),
    )
    return found
def proxy_saver(self):
    """Find up to ``self.limit`` HTTP/HTTPS proxies and persist them via
    ``self.save`` into ``self.filename``."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=self.limit),
        self.save(queue, filename=self.filename),
    )
    asyncio.get_event_loop().run_until_complete(work)
def get_proxies(proxy_type=None):
    """Find 20 proxies of the given types and return them as a list.

    :param proxy_type: list of proxy type names accepted by Broker.find;
        defaults to ``['HTTPS']``.
    :return: the list produced by ``__get_proxies`` (the last gather result).
    """
    # Fix: the original used a mutable list literal as the default argument
    # value, which is shared across calls; a None sentinel is safe and
    # behaviorally identical for callers.
    if proxy_type is None:
        proxy_type = ['HTTPS']
    proxies = asyncio.Queue()
    broker = Broker(proxies)
    tasks = asyncio.gather(
        broker.find(types=proxy_type, limit=20),
        __get_proxies(proxies))
    loop = asyncio.get_event_loop()
    proxy_list = loop.run_until_complete(tasks)[-1]
    return proxy_list
def get_proxies_from_broker():
    """Find 10 HTTP/HTTPS proxies and save them to proxies.txt."""
    print(f'Broker fills proxy pool')
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=10),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
    print(f'Proxy pool filled with Broker')
def get_proxy():
    """Find 10 HTTPS proxies and return them as a list filled by ``show``."""
    found = []
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTPS'], limit=10),
        show(queue, found),
    )
    asyncio.get_event_loop().run_until_complete(work)
    return found
def main():
    """Find up to 5000 HTTP/HTTPS proxies, feed them to ``save``, then
    print the global ``proxy_l`` and its length."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=5000),
        save(queue),
    )
    asyncio.get_event_loop().run_until_complete(work)
    print(proxy_l)
    print(len(proxy_l))
def main():
    """Collect as many HTTP/HTTPS proxies as possible into proxies.txt,
    then rewrite the file in place prefixing every line with 'HTTP|'."""
    queue = asyncio.Queue()
    finder = Broker(queue)
    work = asyncio.gather(
        finder.find(types=['HTTP', 'HTTPS'], limit=99999),
        save(queue, filename='proxies.txt'),
    )
    asyncio.get_event_loop().run_until_complete(work)
    # fileinput with inplace=True redirects stdout into the file.
    for line in fileinput.input(['./proxies.txt'], inplace=True):
        sys.stdout.write('HTTP|{l}'.format(l=line))
def main():
    """Find 10 CONNECT:25 (SMTP) proxies, screened against spam blocklists,
    and save them to proxies.txt."""
    queue = asyncio.Queue()
    finder = Broker(queue, judges=["smtp://smtp.gmail.com"], max_tries=1)
    # Check proxy in spam databases (DNSBL). By default is disabled.
    # more databases: http://www.dnsbl.info/dnsbl-database-check.php
    dnsbl = [
        "bl.spamcop.net",
        "cbl.abuseat.org",
        "dnsbl.sorbs.net",
        "zen.spamhaus.org",
        "bl.mcafee.com",
        "spam.spamrats.com",
    ]
    work = asyncio.gather(
        finder.find(types=["CONNECT:25"], dnsbl=dnsbl, limit=10),
        save(queue, filename="proxies.txt"),
    )
    asyncio.get_event_loop().run_until_complete(work)
def main():
    """Run a local rotating-proxy server on 127.0.0.1:8888 and fetch a set
    of httpbin test URLs through it, then shut the broker down."""
    host, port = '127.0.0.1', 8888  # by default
    loop = asyncio.get_event_loop()
    # Only high-anonymity HTTP, plain HTTPS, and CONNECT:80 proxies;
    # responses outside these HTTP codes are treated as failures.
    types = [('HTTP', 'High'), 'HTTPS', 'CONNECT:80']
    codes = [200, 301, 302]
    broker = Broker(max_tries=1, loop=loop)
    # Broker.serve() also supports all arguments that are accepted
    # Broker.find() method: data, countries, post, strict, dnsbl.
    broker.serve(host=host, port=port, types=types, limit=10, max_tries=3,
                 prefer_connect=True, min_req_proxy=5, max_error_rate=0.5,
                 max_resp_time=8, http_allowed_codes=codes, backlog=100)
    urls = ['http://httpbin.org/get', 'https://httpbin.org/get',
            'http://httpbin.org/redirect/1', 'http://httpbin.org/status/404']
    proxy_url = 'http://%s:%d' % (host, port)
    loop.run_until_complete(get_pages(urls, proxy_url))
    broker.stop()
"""Find and show 10 working HTTP(S) proxies."""
import asyncio

from proxybroker import Broker


async def show(proxies):
    """Print proxies from the queue until the broker pushes None."""
    while True:
        proxy = await proxies.get()
        if proxy is None:
            break
        print('Found proxy: %s' % proxy)


proxies = asyncio.Queue()
broker = Broker(proxies)
tasks = asyncio.gather(
    broker.find(types=['HTTP', 'HTTPS'], limit=10),
    show(proxies),
)
loop = asyncio.get_event_loop()
loop.run_until_complete(tasks)