Example No. 1
 async def _async_context(self, formatter_name):
     pm = ProxyManager(formatter_name=formatter_name)
     pm.load_from_txt("test_proxies.txt", "login:password@ip:port")
     proxies = []
     for i in range(10):
         async with pm.get() as proxy:
             proxies.append(proxy)
     return proxies
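Under the hood, pm.get() evidently returns an async context manager that checks a proxy out for the duration of the block and returns it afterwards. A minimal sketch of that pattern (an illustration, not this library's actual implementation):

import asyncio
from contextlib import asynccontextmanager


class MiniProxyPool:
    """Toy pool showing the checkout/checkin pattern used above."""

    def __init__(self, proxies):
        self._queue = asyncio.Queue()
        for p in proxies:
            self._queue.put_nowait(p)

    @asynccontextmanager
    async def get(self):
        proxy = await self._queue.get()  # wait until a proxy is free
        try:
            yield proxy
        finally:
            self._queue.put_nowait(proxy)  # hand it back to the pool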
Example No. 2
class ProxyMiddleware(object):

    def __init__(self):
        self.proxy_manager = ProxyManager("proxy_list.txt", 6)

    def process_request(self, request, spider):
        request.meta["proxy"] = self.proxy_manager.get_proxy()
Example No. 3
 def monitor(self, sleepTime=30):
     proxyManager = ProxyManager('proxies.txt')
     self._productsSeen = []
     self.getTopNProducts(1)  # publish the latest product seen at the start of the monitoring
     r = requests.get(self.apiLink)
     jsonObj = r.json()
     objects = jsonObj['objects']
     for obj in objects:
         slug, title, imgURL = self._parseProperties(obj)
         self._productsSeen.append(title)
     while True:
         try:
             _flush('Looking for products')
             proxy = proxyManager.random_proxy()
             _flush('Using proxies %s' % proxy.get_dict())
             r = requests.get(self.apiLink, proxies=proxy.get_dict())
             jsonObj = r.json()
             objects = jsonObj['objects']
             for obj in objects:
                 slug, title, imgURL = self._parseProperties(obj)
                 if title in self._productsSeen:
                     continue
                 print('New product found :-D')
                 self._productsSeen.append(title)
                 product = self._getProduct(slug)
                 price, currency, sizes, method, releaseDate = self._getProductInfo(
                     product)
                 self.webhook.send(embed=self._createEmbed(
                     slug, title, imgURL, price, currency, sizes, method,
                     releaseDate))
                 _flush('Found new product!')
         except Exception as err:
             _flush('Encountered some exception')
             _flush(repr(err))
         finally:
             _flush('Sleeping for %ss, will query for products once done' %
                    sleepTime)
             time.sleep(sleepTime)
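_flush is not defined in this snippet; it is presumably a print-and-flush helper along these lines:

import sys


def _flush(message):
    # print immediately even when stdout is block-buffered (e.g. piped to a log file)
    print(message)
    sys.stdout.flush()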
Example No. 4
class ApiWrapper(object):
    BASE_URL = None
    DEFAULT_HEADERS = {
        'User-Agent':
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
    }

    def __init__(self, proxy_file_path=None):
        self.__initialize_session()
        self.proxy_manager = ProxyManager(proxy_file_path)

    def __initialize_session(self):
        self.session = requests.session()
        self.session.headers.update(self.DEFAULT_HEADERS)

    def get_random_proxy(self):
        return self.proxy_manager.random_proxy().get_dict()

    def make_url(self, endpoint):
        is_full_url = endpoint.startswith('http')
        if not is_full_url and not self.BASE_URL:
            raise ValueError('Must set BASE_URL')
        return endpoint if is_full_url else self.BASE_URL + str(endpoint)

    def request(self, method, url, **kwargs):
        # pop so an explicit proxies= kwarg is not passed twice to session.request
        proxy = kwargs.pop('proxies', self.get_random_proxy())
        return self.session.request(method=method,
                                    url=self.make_url(url),
                                    proxies=proxy,
                                    **kwargs)

    def get(self, url, params=None, **kwargs):
        return self.request('get', url, params=params, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        return self.request('post', url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        return self.request('put', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        return self.request('patch', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        return self.request('delete', url, **kwargs)
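Usage then comes down to subclassing with a BASE_URL; the API and endpoint below are placeholders for illustration:

class HttpBinApi(ApiWrapper):
    BASE_URL = 'https://httpbin.org'


api = HttpBinApi(proxy_file_path='proxies.txt')
resp = api.get('/ip')  # routed through a random proxy from proxies.txt
print(resp.json())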
Example No. 5
 def __init__(self):
     self.proxy_manager = ProxyManager("proxy_list.txt", 6)
Example No. 6
class ProxyMiddleware(object):
    def __init__(self):
        self.proxy_manager = ProxyManager("proxy_list.txt", 6)

    def process_request(self, request, spider):
        request.meta["proxy"] = self.proxy_manager.get_proxy()
Example No. 7
from discord_webhook import DiscordWebhook, DiscordEmbed
from bs4 import BeautifulSoup
from faker import Faker
from proxymanager import ProxyManager
import requests
import json
import cloudscraper
import time
import logging
import threading
import random
from random import randint
import string

proxy_manager = ProxyManager('proxies.txt')
fake = Faker("en_US")

threads = input("How many threads? ")


class getTHATBREADDDDDDDD:
    def __init__(self, webhook_url):  # parameter name is a guess; the original identifiers were machine-obfuscated
        pass

    def start():
Example No. 8
import pickle
import threading

lock = threading.Lock()  # guards processrequest across client threads


class ProxyThread(object):

    def __init__(self, conn, client_addr):
        print("Thread successfully created")
        self.proxy_manager = ProxyManager()
        self.client = conn
        self.client_id = client_addr
        self.init_thread()

    def init_thread(self):
        print("waiting")
        while True:
            receive_info = self.client.recv(4096)
            try:
                deserialize = pickle.loads(receive_info)
                process = self.processrequest(deserialize)

                if process is not None:
                    send_data = process
                    self._send(send_data)
                lock.release()
            except EOFError:
                print('no data received')
                break

    def _send(self, data):
        serialize = pickle.dumps(data)
        self.client.send(serialize)

    def processrequest(self, request):
        lock.acquire()
        if request['mode'] == 'admindata':
            return self.getadmindata()

        elif request['mode'] == 'addadmin':
            user = request['username']
            passw = request['password']
            if user is not None and passw is not None:
                self.proxy_manager.addadmin(user, passw)

        elif request['mode'] == 'isadmin':
            user = request['username']
            passw = request['password']
            if user is not None and passw is not None:
                return {'isadmin': self.proxy_manager.isadmin(user, passw)}
        elif request['mode'] == 'addblocked':
            url = request['url']
            if url is not None:
                self.proxy_manager.addblocked(url)

        elif request['mode'] == 'addadminsite':
            url = request['url']
            self.proxy_manager.adminsites.append(url)

        elif request['mode'] == 'isadminsite':
            url = request['url']
            return url in self.proxy_manager.adminsites

        elif request['mode'] == 'addmanager':
            user = request['username']
            passw = request['password']
            if user is not None and passw is not None:
                self.proxy_manager.proxy_man.append(
                    {'username': user, 'password': passw})

        elif request['mode'] == 'isman':
            user = request['username']
            passw = request['password']
            if user is not None and passw is not None:
                return {'isman': self.proxy_manager.isman(user, passw)}

        elif request['mode'] == 'clear_cache':

            self.proxy_manager.clearcache()

        elif request['mode'] == 'geturl':
            return self.handlerequests(request)
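The matching client side is not shown; a minimal sketch, assuming the server listens on a local port and answers one pickled reply per pickled request:

import pickle
import socket


def ask_server(request, host='127.0.0.1', port=9999):  # host and port are assumptions
    with socket.create_connection((host, port)) as sock:
        sock.send(pickle.dumps(request))
        return pickle.loads(sock.recv(4096))


print(ask_server({'mode': 'isadmin', 'username': 'admin', 'password': 'secret'}))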
Example No. 9
# proxymanager makes using proxies really easy
# requests is needed so that we can make requests to the www!
from proxymanager import ProxyManager
from datetime import datetime
import requests
import json
import time

# this api url is a given. normally you will have to do some "hunting" for api URLs on certain sites.
url = 'https://frenzy.shopifyapps.com/api/flashsales'

# a list that will store all current item ids that were found
items = []

# import proxies from your proxies.txt file through the proxymanager package
proxy_manager = ProxyManager('proxies.txt')

# initialize the data for the site, get all the current ids
def initialize(url):
    proxydict = proxy_manager.next_proxy()
    proxies = proxydict.get_dict()
    try:
        # send a request and get all the data from this URL
        r = requests.get(url, proxies=proxies)
        data = json.loads(r.text)
        
        # you will have to know how to read through json or dicts in python
        # in order to traverse through the data. you can find examples online
        for sale in data['flashsales']:
            # store every id found at the URL in our list of items
            items.append(sale['id'])
    except requests.exceptions.RequestException as err:
        # a dead proxy or network hiccup; report it and move on
        print('initialize failed: %s' % err)
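The preview stops at initialization; a polling loop that diffs fresh ids against items could look like this sketch (the 30-second interval is an assumption):

def monitor(url, interval=30):
    while True:
        proxies = proxy_manager.next_proxy().get_dict()
        try:
            data = requests.get(url, proxies=proxies, timeout=10).json()
            for sale in data['flashsales']:
                if sale['id'] not in items:
                    items.append(sale['id'])
                    print('new flash sale found:', sale['id'])
        except requests.exceptions.RequestException as err:
            print('request failed, rotating proxy: %s' % err)
        time.sleep(interval)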
Example No. 10
print(' [ ' + time.strftime('%H:%M:%S') + ' ] ' +
      '-----------@Monitor-Beta------------')
print(' [ ' + time.strftime('%H:%M:%S') + ' ] ' +
      '------------------------------')

false = False
true = True

availability = true
#Headers for the requests
headers = {
    "User-Agent":
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
}

#Initializes the ProxyManager, proxies.txt is the file with proxies in it
proxylist = ProxyManager('proxies.txt')

#URL Bot will be requesting
url = 'https://amnotify.com/api/stock/available'


#Define the monitor function
def monitor():
    proxy = proxylist.next_proxy()  # gets the next proxy from the list so proxies cycle in order
    proxies = proxy.get_dict()  # makes the proxy object usable for an HTTP request
    '''proxies are always stored like this in a dictionary:
    {
        'http' : proxy,
        'https' : proxy,
    }'''
Example No. 11
from proxymanager import ProxyManager
from fake_useragent import UserAgent
from fake_useragent.errors import FakeUserAgentError
import sys
import names
import cloudscraper
from OpenSSL.SSL import Error as OSSLError
from polling import TimeoutException as PTE
import json

api_key = '2captcha'

if len(sys.argv) != 3:
    print("format: test.py <proxies>")
    sys.exit(1)
p = ProxyManager(sys.argv[2])
if len(p.proxies) == 0:
    p = ProxyManager()
s = cloudscraper.create_scraper(delay=10,
                                recaptcha={
                                    'provider': '2captcha',
                                    'api_key': api_key,
                                    'proxy': True
                                })

try:
    ua = UserAgent(verify_ssl=False, use_cache_server=False)
    useragent = ua.chrome
except FakeUserAgentError as e:
    useragent = "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML like Gecko) Chrome/44.0.2403.155 Safari/537.36"
Example No. 12
    LOG_FORMAT = "%(asctime)s %(levelname)s: %(message)s"
    logging.basicConfig(format=LOG_FORMAT, level=logging.DEBUG)

    # ==This is what you need to construct a ProxyManager==
    proxies = [
        'http://1.1.1.1:80', 'http://2.2.2.2:80', 'http://3.3.3.3:80',
        'socks5://4.4.4.4:80'
    ]
    # A limited_url written without the 'http://' or 'https://' prefix
    # applies the same access-rate control to both schemes.
    limited_urls = [(r'^https?://www\.baidu\.com/link', 2),
                    (r'^https?://www\.baidu\.com/test', 2),
                    (r'^https?://www\.baidu\.com(?!/link|/test)', 2),
                    (r'^http://www\.csdn\.com', 2)]
    proxymgr = ProxyManager(proxies, limited_urls)
    # ==End==

    # The following performs 24 concurrent tasks
    # that each request a proxy from $proxymgr
    # to visit a url in $domains
    domain1 = "http://www.baidu.com/test"
    domain2 = "http://www.baidu.com/link"
    domain3 = "http://www.baidu.com"
    domain4 = "http://www.baidu.com/hello"
    domain5 = "https://www.csdn.com"
    domains = [
        domain1, domain2, domain1, domain1, domain1, domain2, domain3, domain2,
        domain2, domain2, domain2, domain2, domain2, domain2, domain4, domain3,
        domain1, domain1, domain1, domain2, domain5, domain3, domain3, domain4
    ]
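The fan-out itself is not shown in this preview; one sketch with a thread pool, where get_proxy(url) is a hypothetical accessor standing in for whatever this ProxyManager variant actually exposes:

from concurrent.futures import ThreadPoolExecutor

import requests


def visit(url):
    proxy = proxymgr.get_proxy(url)  # hypothetical method; assumed to honor the per-URL rate limits
    return requests.get(url, proxies={'http': proxy, 'https': proxy}, timeout=10)


with ThreadPoolExecutor(max_workers=20) as pool:
    results = list(pool.map(visit, domains))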
Example No. 13
import json
from GUIAMULET import *
from threading import Thread
import subprocess
import time
from pyamf import sol
import sys
from proxymanager import ProxyManager
from amulet import *
app = wx.App(False)
frame1 = MyFrame2(None)
frame2 = MyFrame1(None)
frame3 = ProxyManager()
d = dict(sol.load('items.sol'))
d['1000 cents'] = '1000cents'
d['300 cents'] = '300cents'
d['50 cents'] = '50cents'
d['10 cents'] = '10cents'
d['100 cents'] = '100cents'
d['30 cents'] = '30cents'
keys = sorted(d.keys())
m_checkList2 = wx.CheckListBox(frame2, wx.ID_ANY, wx.DefaultPosition,
                               wx.DefaultSize, keys, 0)
frame2.bSizer1.Add(m_checkList2, 1, wx.ALL | wx.EXPAND, 5)
frame2.Layout()
frame2.m_checklist = m_checkList2
itemsarray = {}
idsarray = d
for q in d:
    itemsarray[d[q]] = q
Example No. 14
def create():
    global session

    useProxies = config['useproxies']

    proxy_manager = ProxyManager('proxies.txt')

    if useProxies:
        random_proxy = proxy_manager.random_proxy()
        proxee = random_proxy.get_dict()
    else:
        proxee = None

    if config['userealname']:
        fName = config['firstname']
        lName = config['lastname']
    else:
        fName = names.get_first_name()
        lName = names.get_last_name()

    email = names.get_first_name() + names.get_last_name() + config['catchall']

    url = 'https://undefeated.com/account'

    payload = {
        'form_type': 'create_customer',
        'utf8': '✓',
        'customer[first_name]': fName,
        'customer[last_name]': lName,
        'customer[email]': email,
        'customer[password]': config['password']
    }

    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
        'Upgrade-Insecure-Requests': '1'
    }

    with logger.printLock:
        print(
            time.strftime("[%H:%M:%S]") + Fore.CYAN +
            'Grabbing cookies from home page')
    session.get('https://undefeated.com', proxies=proxee, headers=headers)

    with logger.printLock:
        print(time.strftime("[%H:%M:%S]") + Fore.YELLOW + 'Signing up')
    req = session.post(url,
                       data=payload,
                       headers=headers,
                       proxies=proxee,
                       allow_redirects=False)

    if req.text == '<html><body>You are being <a href="https://undefeated.com/challenge">redirected</a>.</body></html>':
        with logger.printLock:
            print(
                time.strftime("[%H:%M:%S]") + Fore.RED +
                'Error creating account, possibly captcha')
    else:
        with logger.printLock:
            print(
                time.strftime("[%H:%M:%S]") + Fore.GREEN +
                "Successful account creation using %s" % (email))
            with open("undefeatedaccts.txt", "a+") as f:
                f.write(email + ':' + config['password'] + "\n")
Example No. 15
import requests
from bs4 import BeautifulSoup as bs
import logging
import http.client
import time
from random import randint
import json
from proxymanager import ProxyManager
from fake_useragent import UserAgent

ua = UserAgent()
proxy_manager = ProxyManager('proxies.txt')


def getCurrentTime():
    return time.strftime("[%H:%M:%S]--------")


gmail = "@gmail.com"  #dont change

with open('config.json') as json_data_file:
    config = json.load(json_data_file)

CONFIG = config["CONFIG"]
beggmail = CONFIG["beggmail"]
first_name = CONFIG["first_name"]
last_name = CONFIG["last_name"]
password = CONFIG["password"]

print("{}Config Loaded".format(getCurrentTime()))
Example No. 16
 def __init__(self, conn, client_addr):
     print("Thread successfully created")
     self.proxy_manager = ProxyManager()
     self.client = conn
     self.client_id = client_addr
     self.init_thread()
Example No. 17
import requests
import time
from proxymanager import ProxyManager
import json

print("### TASKBOT RESTOCK MONITOR (v1.01) ###")
print("### BY @hasterestocks ###")
print('\n')

proxy_manager = ProxyManager('proxies.txt')
s = requests.session()

webhook = input("What is your webhook URL?\n")

headers = {
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'en-US,en;q=0.9',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.167 Safari/537.36',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
}

while True:
    while True:
        try:
            proxydict = proxy_manager.next_proxy()
            proxies = proxydict.get_dict()

            try:
                response = requests.get("https://aiomacbot.myshopify.com/products.json", headers=headers, timeout=5, proxies=proxies)
                data = json.loads(response.text)
Example No. 18
#############################################################################
###########################################################################

import os
import random
import sys

import fake_useragent
from proxymanager import ProxyManager

proxi2 = (os.path.join(sys.path[0]) + '/proxy.txt')
pr = open(proxi2, 'r')
pix = list()
for line in pr:
    # iterate the file directly so no proxy line is skipped
    pix.append(line.replace('\n', ''))
#print(pix)
prox = {
    'http': 'http://{}'.format(random.choice(pix)),
    #'http': 'http://138.68.182.14:8080',
    #'https': 'http://163.172.86.64:3128',
}
#=============================================
proxi = ProxyManager(os.path.join(sys.path[0]) + '/proxy.txt')
#print('proxy read')
#proxy = str(random.choice(pix))

############################################################
ua = fake_useragent.UserAgent(cache=True,
                              verify_ssl=False,
                              use_cache_server=False)
#print(ua.random)

comboo = (os.path.join(sys.path[0]) + '/combo.txt')
combo_path = comboo
fp = open(combo_path, 'r')
fp1 = open(combo_path, 'r')
lines = len(fp1.readlines())
linecount = 0
Example No. 19
import logging
import time
from collections import OrderedDict

import requests
from discord_webhook import DiscordWebhook, DiscordEmbed
from proxymanager import ProxyManager

log = logging.getLogger(__name__)


def nike_web(code1, code2, main_webhook_1, sleep, keywords):

    from random_user_agent.user_agent import UserAgent
    from random_user_agent.params import SoftwareName, OperatingSystem

    url = f"https://api.nike.com/product_feed/threads/v2/?filter=marketplace%28{code1}%29&filter=language%28{code2}%29&filter=channelId%28d9a5bc42-4b9c-4976-858a-f159cf99c647%29"
    proxy_manager = ProxyManager('proxies.txt')
    main_webhook = main_webhook_1
    server_logs = "server log webhook"

    software_names = [SoftwareName.CHROME.value]
    operating_systems = [
        OperatingSystem.WINDOWS.value, OperatingSystem.LINUX.value
    ]
    user_agent_rotator = UserAgent(software_names=software_names,
                                   operating_systems=operating_systems,
                                   limit=100)

    items = []

    def initialize():
        # one-shot setup: record the ids already on the site so only new ones alert
        try:

            headers = {
                'upgrade-insecure-requests': '1',
                'cache-control': 'no-cache',
                'Pragma': 'no-cache',
                'user-agent': user_agent_rotator.get_random_user_agent(),
                'accept':
                'application/xhtml+xml,text/html,application/xml;q=0.9,image/apng,image/webp,*/*;q=0.8,application/signed-exchange;v=b3',
                'sec-fetch-site': 'none',
                'accept-encoding': 'gzip, deflate, br',
                'accept-language': 'en-US,en;q=0.9'
            }

            session = requests.Session()
            session.headers = OrderedDict(headers)

            proxydict = proxy_manager.next_proxy()
            proxies = proxydict.get_dict()
            r = session.get(url=url, proxies=proxies)
            data = r.json()['objects']
            for x in data:
                id = x['id']
                items.append(id)

            log.warning(f"{len(items)} products loaded on site")
            log.info(f'Initialized Nike {code1} Web Monitor')

        except Exception as e:
            log.error(str(e))

    def monitor():

        while True:
            try:

                proxydict = proxy_manager.next_proxy()
                proxies = proxydict.get_dict()

                headers = {
                    'upgrade-insecure-requests': '1',
                    'cache-control': 'no-cache',
                    'Pragma': 'no-cache',
                    'user-agent': user_agent_rotator.get_random_user_agent(),
                    'sec-fetch-mode': 'navigate',
                    'sec-fetch-user': '******',
                    'accept':
                    'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3',
                    'sec-fetch-site': 'none',
                    'accept-encoding': 'gzip, deflate, br',
                    'accept-language': 'en-US,en;q=0.9'
                }

                session = requests.Session()
                session.headers = OrderedDict(headers)
                r = session.get(url=url, proxies=proxies)
                data = r.json()['objects']

                for x in data:
                    id = x['id']
                    if id not in items:
                        title = x['productInfo'][0]['productContent'][
                            'fullTitle']
                        link = f"https://www.nike.com/{code1.lower()}/t/{x['publishedContent']['properties']['seo']['slug']}"
                        image_url = x['productInfo'][0]['imageUrls'][
                            'productImageUrl']
                        price = x['productInfo'][0]['merchPrice'][
                            'currentPrice']
                        currency = x['productInfo'][0]['merchPrice'][
                            'currency']

                        sizes = []
                        stock_level = []
                        for y in x['productInfo'][0]['skus']:
                            sizes.append(y['nikeSize'])

                        for y in x['productInfo'][0]['availableSkus']:
                            stock_level.append(y['level'])

                        final = ""
                        for i in range(len(sizes)):
                            final = final + sizes[
                                i] + f" - [{stock_level[i]}]" + "\n"

                        webhook = DiscordWebhook(url=main_webhook)
                        embed = DiscordEmbed(
                            title=f"Nike {code1} Web [BACKEND]",
                            description=f"[{title}]({link})",
                            color=0xFF7F50)
                        embed.set_thumbnail(url=image_url)
                        embed.add_embed_field(
                            name="Price",
                            value=f"${str(price)} {currency}",
                            inline=True)  #Need to check api to determine
                        embed.add_embed_field(name="Possible Sizes InStock",
                                              value=final,
                                              inline=True)
                        embed.add_embed_field(
                            name="Useful links: ",
                            value=
                            f"[Cart](https://secure-store.nike.com/{code1.lower()}/checkout/html/cart.jsp) | [Region Switch](https://www.nike.com/?geoselection=true) | [Shopback](https://www.shopback.sg) | [BuyAndShip](https://www.buyandship.com.sg/)",
                            inline=False)
                        embed.set_footer(
                            text=
                            f'DogeSolutions • {time.strftime("%H:%M:%S", time.localtime())} SGT',
                            icon_url=
                            'https://pbs.twimg.com/profile_images/1128853846753546240/CB8smmAP_400x400.jpg'
                        )
                        webhook.add_embed(embed)
                        webhook.execute()

                        items.append(id)
                        log.info(f"Item sent to discord! - [{code1}]")

                        log.warning("Checking for KWs")
                        hit = ""
                        for i in keywords:
                            if i in title.lower():
                                hit = hit + i + "\n"

                        if hit != "":
                            webhook = DiscordWebhook(
                                url=main_webhook,
                                content=
                                f'@everyone Keyword detected!```{hit}```')
                            webhook.execute()
                            log.info("Keyword hit!")
                        else:
                            log.warning("No keywords detected")

                time.sleep(sleep)
                log.warning(f'Scraping Nike Web - [{code1}]')

            except Exception as e:
                log.error(str(e))
Example No. 20
from fake_useragent import UserAgent
from fake_useragent.errors import FakeUserAgentError
import sys
import names
import cloudscraper
from OpenSSL.SSL import Error as OSSLError
from polling import TimeoutException as PTE
import json
from proxymanager import ProxyManager


password = "******"

if len(sys.argv) != 3:
    print("format: python nakedcph_acct_gen.py <emails> <proxies>")
    sys.exit(1)
p = ProxyManager(sys.argv[2])
if len(p.proxies) == 0:
    p = ProxyManager()


while len(emails) > 0:
    email = emails[0]
    if len(email) == 0:
        emails.remove(email)
        continue
    s.cookies.clear()
    print("Creating account for {}".format(email))
    proxy = p.random_proxy().get_dict()

    url = 'https://www.nakedcph.com'
    h = headers
Example No. 21
from GUIVOTE import *
from threading import Thread
from proxymanager import ProxyManager
import subprocess
import os
import json
import sys
import time
from scout import *
thrd = None
app = wx.App(False)
frame = MyFrame2(None)
frame2 = ProxyManager()
try:
    frame.m_textCtrl6000.SetValue("30")
    config = json.loads(open('voteconfig.json', 'r').read())
    if 'server' in config:
        frame.m_textCtrl6.SetValue(config['server'])
    if 'declarewar' in config:
        frame.m_textCtrl8.SetValue(config['declarewar'])
    if 'votername' in config:
        frame.m_textCtrl7.SetValue(config['votername'])
    if "totalconnections" in config:
        frame.m_textCtrl41.SetValue(str(config['totalconnections']))
    if "maxchecks" in config:
        frame.m_textCtrl600.SetValue(str(config["maxchecks"]))
    if "usebroker" in config:
        frame.m_checkBox1.SetValue(config["usebroker"])
    if "timeout" in config:
        frame.m_textCtrl6000.SetValue(str(config["timeout"]))
    if "maxperproxy" in config:
Example No. 22
 def __init__(self):
     self.proxy_manager = ProxyManager("proxy_list.txt", 6)
Example No. 23
def loadProxy():
    proxylist = ProxyManager('proxies.txt')
    proxy = proxylist.next_proxy()
    proxies = proxy.get_dict()
    return proxies
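The returned mapping plugs straight into requests; the URL below is a placeholder:

import requests

r = requests.get('https://httpbin.org/ip', proxies=loadProxy(), timeout=10)
print(r.text)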
Example No. 24
def generator():
    global session
    s = requests.Session()

    first = names.get_first_name(gender='male') # random first name
    last = names.get_last_name() # random last name
    catchall = '@gmail.com' # input your catchall
    password = '******' # input a password
    random_number = random.randint(1, 10000)
    email = f'{last}{random_number}{catchall}'

    proxym = ProxyManager('proxies.txt') # create a proxies.txt file and paste in proxies
    proxyr = proxym.random_proxy()
    proxyf = proxyr.get_dict()

    info = {
        'form_type': 'create_customer',
        'utf8': '✓',
        'customer[first_name]': first,
        'customer[last_name]': last,
        'customer[email]': email,
        'customer[password]': password
    }

    submit_info = s.post(url, data=info, headers=headers, proxies=proxyf) # submits first request

    if submit_info.url == 'https://cncpts.com/': # if account was submitted then this will be the site after the request
        print('Successfully signed up!')
        print(f'{email}:{password}')

    else:
        print('Captcha needed, submitting now...') # otherwise you need a captcha
        url_cap = s.get('https://cncpts.com/challenge')
        soup = BeautifulSoup(url_cap.content, 'html.parser')
        auth_val = soup.findAll("input", {"name": "authenticity_token"}) # grabs hidden authenticity token from source
        auth_final = auth_val[0]["value"]

        api_key = '' # api key from anticaptcha
        site_key = '6LeoeSkTAAAAAA9rkZs5oS82l69OEYjKRZAiKdaF' # site key from concepts
        cap_url = submit_info.url

        client = AnticaptchaClient(api_key)
        task = NoCaptchaTaskProxylessTask(cap_url, site_key)
        job = client.createTask(task)
        job.join()
        response = job.get_solution_response() # grabs token from anticaptcha

        cap_info = {
            'utf8': '✓',
            'authenticity_token': auth_final,
            'g-recaptcha-response': response
        }

        cap_headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Referer': 'https://cncpts.com/challenge',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
        }

        submit_captcha = s.post(url, data=cap_info, headers=cap_headers, proxies=proxyf) # submits second request with captcha

        if submit_captcha.url == 'https://cncpts.com/': # if account was submitted then this will be the site after the request
            print('Captcha successfully submitted!')
            print(f'{email}:{password}')

        else:
            print('Account signup unsuccessful, please try again.') # otherwise there was a problem with the captcha
Example No. 25
 def __init__(self, proxy_file_path=None):
     self.__initialize_session()
     self.proxy_manager = ProxyManager(proxy_file_path)
import os
import random
import datetime
from pathlib import Path
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from subprocess import Popen, PIPE

import requests
from fake_useragent import UserAgent
from proxymanager import ProxyManager

me = "*****@*****.**"
you = "*****@*****.**"

home = str(Path.home())
dirpath = os.getcwd()

print(datetime.datetime.now())

ua = UserAgent()
proxy_manager = ProxyManager('{}/proxies.txt'.format(home))
random_proxy = proxy_manager.random_proxy()
proxies = random_proxy.get_dict()
print(proxies)

session = requests.Session()
session.headers = {'User-Agent': ua.random}

product_link_list = [
    'https://www.ssense.com/en-us/men/product/nike/white-off-white-edition-air-presto-sneakers/3625319',
    'https://www.ssense.com/en-us/men/product/nike/black-off-white-edition-air-presto-sneakers/3456739',
    'https://www.ssense.com/en-us/men/product/yeezy/grey-boost-700-sneakers/3676879',
    'https://www.ssense.com/en-us/women/product/yeezy/grey-boost-700-sneakers/3677059',
    'https://www.ssense.com/en-us/men/product/y-3/black-futurecraft-runner-4d-ii-sneakers/3131628',
    'https://www.ssense.com/en-us/men/product/nike/beige-off-white-edition-the-ten-blazer-mid-sneakers/3685649',
    'https://www.ssense.com/en-us/men/product/nikelab/black/3685639d',