Example #1
# NOTE: the source truncates the start of this example. The imports, constants,
# Message model, and the opening of get_retry_loop below are a reconstructed
# sketch; URL, PROXY_COUNT, and the Message fields are hypothetical.
import requests
from pydantic import BaseModel
from requests.exceptions import ConnectionError, ProxyError, Timeout
from requests_whaor import RequestsWhaor

URL = "http://example.com/latest-message"  # Hypothetical JSON endpoint to poll.
PROXY_COUNT = 5  # Hypothetical number of Tor containers to spin up.


class Message(BaseModel):  # Hypothetical shape of the endpoint's payload.
    id: int
    channelID: int


def get_retry_loop(url, proxies, retry=5):
    """Fetch url through the rotating proxy, retrying on common proxy failures."""
    try:
        resp = requests.get(url, proxies=proxies, timeout=5)
        return Message(**resp.json())
    except (ProxyError, Timeout, ConnectionError) as e:
        print()
        print(e)
        print(f"Will retry ({retry}) more times.")
        print()

    if retry > 0:
        retry -= 1
    else:
        return None

    return get_retry_loop(url, proxies, retry=retry)


last_message_id = None
with RequestsWhaor(onion_count=PROXY_COUNT) as requests_whaor:
    print("What's a websocket?")
    while True:
        message = get_retry_loop(URL, requests_whaor.rotating_proxy)

        if message is None:  # Retries ran out; poll again.
            continue

        # if message.channelID != 1:
        #     continue

        if message.id == last_message_id:
            continue

        else:
            last_message_id = message.id
            print(message.json(indent=4))
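
The recursion above adds a stack frame for every failed attempt; a plain loop does the same job without that cost. A minimal sketch, reusing the same hypothetical Message model and constants as the reconstruction above:

def get_retry_loop_iterative(url, proxies, retry=5):
    """Behaves like get_retry_loop, but loops instead of recursing."""
    for attempts_left in range(retry, -1, -1):  # retry + 1 total attempts.
        try:
            resp = requests.get(url, proxies=proxies, timeout=5)
            return Message(**resp.json())
        except (ProxyError, Timeout, ConnectionError) as e:
            print(f"\n{e}\nWill retry ({attempts_left}) more times.\n")
    return None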
Example #2
# NOTE: the source truncates the start of this example. The imports and the
# sample watchlist below are reconstructed; fuzzy_search comes from the
# truncated part, and a hypothetical stand-in for it is sketched after this
# example.
from concurrent.futures import ThreadPoolExecutor, as_completed
from requests_whaor import RequestsWhaor

watchlist = ["$MSFT", "$AAPL ", "$TSLA", "$AAPL"]  # Hypothetical cashtags.

print()  # Print a newline when complete, just to make things look a little cleaner.

watchlist = sorted(
    set(map(lambda cashtag: cashtag.replace("$", "").strip(), watchlist)))
# Let's sort, remove duplicates, and strip the '$' prefix from each symbol.

valid_symbols = [
]  # Used to store symbols validated with the fuzzy_search function.
call_chains = []  # Used to store all the found call option chains.

# Decide how many threads and proxies your computer can handle.
MAX_THREADS = 6
# Each proxy is a tor circuit running inside a separate docker container.
MAX_PROXIES = 6

with RequestsWhaor(onion_count=MAX_PROXIES,
                   max_threads=MAX_THREADS) as request_whaor:
    # RequestsWhaor will spin up a network of Tor nodes to use as a rotating proxy.

    with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
        futures = [
            executor.submit(
                fuzzy_search, ticker, session=request_whaor
            )  # ^-- Here we pass request_whaor as a session-like object.
            for ticker in watchlist
        ]

        for future in as_completed(futures):
            result = future.result(timeout=60)

            print(".", end="", flush=True)  # Quick progress bar.

            # NOTE: the source truncates here; presumably each valid result
            # would be appended to valid_symbols and later used to collect
            # call_chains.
Example #3
# title: Example of making 30 requests while getting a fresh pool of IP addresses every 10 requests.

from requests_whaor import RequestsWhaor

URL = "http://jsonip.com/"

with RequestsWhaor(onion_count=5) as requests_whaor:
    for _ in range(3):
        for _ in range(10):
            resp = requests_whaor.get(URL)

            if resp:
                print(resp.text)

        requests_whaor.restart_onions()  # Restart the Tor containers to get a fresh pool of IP addresses.
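
To confirm that the restart actually rotates the pool, one can compare the exit IPs reported before and after it. A small sketch, assuming jsonip.com returns a JSON body with an "ip" field and that every request succeeds:

from requests_whaor import RequestsWhaor

URL = "http://jsonip.com/"

with RequestsWhaor(onion_count=5) as requests_whaor:
    before = {requests_whaor.get(URL).json()["ip"] for _ in range(10)}
    requests_whaor.restart_onions()
    after = {requests_whaor.get(URL).json()["ip"] for _ in range(10)}
    print("exit IPs seen in both pools:", before & after)  # Expect little or no overlap.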
Example #4
# This variant hands rotating_proxy straight to the stock requests library.
from requests_whaor import RequestsWhaor
import requests

URL = "http://jsonip.com/"

with RequestsWhaor(onion_count=5) as requests_whaor:
    for _ in range(10):
        result = requests.get(URL, proxies=requests_whaor.rotating_proxy)
        print(result.text)

# The same idea using RequestsWhaor's own get() method, which sends requests
# through the rotating proxy itself and retries failures up to max_retries times.
from requests_whaor import RequestsWhaor

URL = "http://jsonip.com/"

with RequestsWhaor(onion_count=5, max_retries=10) as requests_whaor:
    for _ in range(10):
        result = requests_whaor.get(URL)
        print(result.text)
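
Since rotating_proxy is passed directly as the proxies= argument above, it is evidently a standard requests-style proxies mapping, so it can be inspected or reused with any requests-compatible client. A quick look (the exact scheme and port shown are illustrative and will vary per run):

from requests_whaor import RequestsWhaor

with RequestsWhaor(onion_count=2) as requests_whaor:
    print(requests_whaor.rotating_proxy)
    # Expect something like {'http': 'socks5://localhost:<port>', 'https': 'socks5://localhost:<port>'}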