Пример #1
0
def get_proxy_list(valid_proxies):
    """Probe scraped proxies concurrently and collect the working ones.

    Args:
        valid_proxies: list that working "host:port" strings are appended to
            (mutated in place).

    Returns:
        The same *valid_proxies* list.
    """
    from http_request_randomizer.requests.proxy.requestProxy import RequestProxy
    import requests
    import concurrent.futures

    # The randomizer scrapes public lists, so the proxy count varies per run.
    req_proxy = RequestProxy()
    proxies_list = req_proxy.get_proxy_list()
    proxies = [prox.get_address() for prox in proxies_list]

    print(proxies)

    def extract(proxy):
        """Probe one proxy against httpbin; record it if it answers in 1s."""
        try:
            # Fix: use an explicit scheme — requests documents proxy URLs as
            # 'http://host:port'; the scheme-relative '//host:port' form is
            # not a valid proxy URL.
            r = requests.get('https://httpbin.org/ip',
                             proxies={'http': 'http://' + proxy,
                                      'https': 'http://' + proxy},
                             timeout=1)
            print(r.json(), '-working')
            valid_proxies.append(proxy)
        except Exception:
            # Best-effort probe: dead or slow proxies are simply skipped.
            pass
        return proxy

    # Smoke-test one hard-coded address first (kept from the original).
    extract("203.115.112.218:3128")
    print("se llego")

    with concurrent.futures.ThreadPoolExecutor() as executor:
        executor.map(extract, proxies)

    return valid_proxies
Пример #2
0
def botStartChrome():
    """Open a target page through a randomly chosen scraped proxy in Chrome.

    Side effects: mutates the global ``DesiredCapabilities.CHROME`` proxy
    entry and launches (then quits) a browser.
    """
    print("give me a bottle of rum!")
    # The randomizer scrapes public lists, so the proxy count varies per run.
    req_proxy = RequestProxy()
    proxies = req_proxy.get_proxy_list()
    # NOTE(review): assumes at least 51 proxies were scraped — TODO confirm.
    number = random.randint(0, 50)
    print("rand int ==== " + str(number))
    PROXY = proxies[number].get_address()
    print(proxies[number].get_address())
    print(proxies[number].country)
    webdriver.DesiredCapabilities.CHROME['proxy'] = {
        "httpProxy": PROXY,
        "ftpProxy": PROXY,
        "sslProxy": PROXY,
        "proxyType": "MANUAL",
    }
    # Fix: the proxy capabilities above are set on CHROME and the executable
    # is chromedriver, but the original launched Firefox — driver and
    # capabilities now agree.
    driver = webdriver.Chrome(executable_path='chromedriver')
    try:
        driver.get('https://temanpost.com/games/daftar-game-berbayar-yang-bisa-diklaim-gratis-pada-agustus-2020/')
        time.sleep(30)
    except Exception:
        # Keep best-effort behavior, but don't use a bare except.
        print("error something wrong")

    driver.quit()
Пример #3
0
def make_request():
    """Fetch the current proxy list and persist its addresses via save_prox()."""
    # Proxy inventory differs between runs; the provider list is dynamic.
    fetched = RequestProxy().get_proxy_list()
    addresses = [entry.get_address() for entry in fetched]
    save_prox(addresses)
Пример #4
0
class _Proxy:
    """Rotating proxy pool that vets each candidate before handing it out."""

    # IPv4 extractor applied to icanhazip.com response bodies.
    ip = re.compile(r"\d+\.\d+\.\d+\.\d+")

    @staticmethod
    def get_ip():
        """Return this host's own public IPv4 address (request made directly)."""
        return _Proxy.ip.search(
            requests.get('http://icanhazip.com/').text).group(0)

    def __init__(self, conf, file=None):
        """Build the pool either from conf["proxies"] (if *file* is given)
        or by scraping a fresh HTTP proxy list."""
        self.config = conf

        self.req_proxy = RequestProxy(protocol=Protocol.HTTP)
        if file is not None:
            self.proxies_list = self.config["proxies"]
        else:
            self.proxies_list = self.req_proxy.get_proxy_list()

    def get_proxy(self):
        """Pop candidates until one is reachable and anonymous; return its address.

        Fixes vs. original: the bare ``raise Exception()`` failures and
        requests timeouts were never caught (only IndexError was), so one
        bad proxy crashed the loop; and the IndexError from ``pop(0)`` was
        raised outside the try, making the "load more proxies" branch
        unreachable. Both paths now continue the loop.
        """
        logger = logging.getLogger(f"pid={os.getpid()}")

        while True:
            try:
                p = self.proxies_list.pop(0).get_address()
            except IndexError:
                # Pool exhausted: scrape a fresh list and keep going.
                logger.info("Loading more proxies")
                self.proxies_list = self.req_proxy.get_proxy_list()
                continue

            try:
                logger.info(f"Trying {p}")
                proxy = {"http": f"http://{p}", "https": f"https://{p}"}
                match = _Proxy.ip.search(
                    requests.get("http://icanhazip.com/",
                                 proxies=proxy,
                                 timeout=2).text)
                # No IP in the response body -> broken proxy.
                if match is None:
                    continue
                # Response shows our own IP -> proxy is not anonymous.
                if match.group(0) == self.get_ip():
                    continue
                # Second sanity check: require a plain 200 from another site.
                if requests.get("http://google.com/", proxies=proxy,
                                timeout=5).status_code != 200:
                    continue

                return p
            except Exception:
                # Connection errors / timeouts: move on to the next proxy.
                continue
Пример #5
0
    def __init__(self, url='https://youtube.com', proxy=None, verbose=False):
        """Configure and launch a headless Chrome session, then open *url*.

        Args:
            url: page opened at the end of construction via ``self.open_url()``.
            proxy: optional "host:port"; when set it is passed to Chrome via
                ``--proxy-server``.
            verbose: stored on the instance; not read in this method.
        """

        self.url = url
        self.proxy = proxy
        self.verbose = verbose
        # All chrome options
        # https://peter.sh/experiments/chromium-command-line-switches/
        self.options = webdriver.ChromeOptions()
        # Run in headless mode, without a UI or display server dependencies
        self.options.add_argument('--headless')
        # Disables GPU hardware acceleration. If software renderer is not in
        # place, then the GPU process won't launch
        self.options.add_argument('--disable-gpu')
        # Disable audio
        self.options.add_argument('--mute-audio')
        # Runs the renderer and plugins in the same process as the browser
        self.options.add_argument('--single-process')
        # Autoplay policy
        self.options.add_argument('--autoplay-policy=no-user-gesture-required')
        if self.proxy:
            # Uses a specified proxy server, overrides system settings. This
            # switch only affects HTTP and HTTPS requests
            self.options.add_argument('--proxy-server={0}'.format(self.proxy))
        # A string used to override the default user agent with a custom one

        # NOTE(review): this scrapes a full proxy list and mutates the
        # process-global CHROME capabilities on every init, even when an
        # explicit proxy was already set via --proxy-server above — confirm
        # this double proxy configuration is intended.
        req_proxy = RequestProxy(
        )  # you may get a different number of proxies on each run
        proxies = req_proxy.get_proxy_list()  # this will create the proxy list
        PROXY = proxies[0].get_address()
        webdriver.DesiredCapabilities.CHROME['proxy'] = {
            "httpProxy": PROXY,
            "ftpProxy": PROXY,
            "sslProxy": PROXY,
            "proxyType": "MANUAL",
        }
        self.user_agent = utils.user_agent()
        self.options.add_argument('--user-agent={0}'.format(self.user_agent))
        self.browser = webdriver.Chrome(ChromeDriverManager().install(),
                                        options=self.options)
        self.default_timeout = 20
        # Specifies the amount of time the driver should wait when trying to
        # find any element (or elements) if it is not immediately available.
        # The default setting is 0. Once set, the implicit wait is set for the
        # life of the WebDriver object.
        self.browser.implicitly_wait(self.default_timeout)
        # Set the amount of time to wait for a page load to complete before
        # throwing an error.
        # self.browser.set_page_load_timeout(self.default_timeout)
        # Set the amount of time that the script should wait during an
        # execute_async_script call before throwing an error.
        # self.browser.set_script_timeout(self.default_timeout)
        # Sets the width and height of the current window$
        self.browser.set_window_size(1920, 1080)
        # Opens the page
        self.open_url()
Пример #6
0
def proxies():
    """Write the scraped proxy address list to ``proxies.txt``, once.

    Does nothing when the file already exists. The on-disk format is the
    Python repr of the address list, same as the original implementation.
    """
    if os.path.isfile('proxies.txt'):
        return
    req_proxy = RequestProxy()
    PROXIES = "{0}".format(
        list(map(lambda x: x.get_address(), req_proxy.get_proxy_list())))
    # Context manager guarantees the handle is closed even if the write fails.
    with open('proxies.txt', 'w') as f:
        # writelines() on a str wrote it char-by-char; write() is equivalent
        # output but a single call.
        f.write(PROXIES)
Пример #7
0
    def __init__(self,
                 db,
                 path_to_webdriver,
                 config=None,
                 logger=None,
                 cookies=None):
        """Start a Chrome session, configure proxy capabilities, seed cookies.

        Args:
            db: handle stored on the instance for later use.
            path_to_webdriver: filesystem path to the chromedriver binary.
            config: optional configuration object, stored as-is.
            logger: logger used for diagnostics.
            cookies: optional iterable of cookie dicts to install; when None,
                the browser's current cookies are captured instead.
        """
        self.logger = logger
        # NOTE(review): this raises if logger is left as None — confirm
        # callers always pass one.
        # Fix: the original format string had no placeholder, so the
        # webdriver path was never actually logged.
        self.logger.info("webdriver path: {}".format(path_to_webdriver))

        self.config = config

        chrome_options = ChromeOption()

        # Silence site notification prompts.
        prefs = {"profile.default_content_setting_values.notifications": 2}
        chrome_options.add_experimental_option("prefs", prefs)

        # ignore error proxy
        chrome_options.add_argument('--ignore-certificate-errors')
        chrome_options.add_argument('--ignore-ssl-errors')

        # automatically dismiss prompt
        chrome_options.set_capability('unhandledPromptBehavior', 'dismiss')

        self.driver = webdriver.Chrome(path_to_webdriver,
                                       chrome_options=chrome_options)

        # get PROXY
        req_proxy = RequestProxy()
        proxies = req_proxy.get_proxy_list()

        # set PROXY
        # NOTE(review): these capabilities are set AFTER the driver above was
        # created, so they cannot affect this session — left in place to
        # preserve behavior, but confirm whether the proxy was ever meant to
        # apply here.
        PROXY = proxies[0].get_address()
        webdriver.DesiredCapabilities.CHROME['proxy'] = {
            "httpProxy": PROXY,
            "ftpProxy": PROXY,
            "sslProxy": PROXY,
            "proxyType": "MANUAL",
        }

        if cookies is None:
            self.cookies = self.driver.get_cookies()
        else:
            for cookie in cookies:
                self.driver.add_cookie(cookie)
            self.cookies = cookies
        self.db = db
class Proxies:
    """Pool of scraped proxies with a CSV-file fallback list."""

    def __init__(self):
        # Scraped pool; size varies between runs.
        self.req_proxy = RequestProxy()
        self.proxies = self.req_proxy.get_proxy_list(
        )  # this will create proxy list
        self.total = len(self.proxies)
        self.getExtraProxies()
        pass

    def getExtraProxies(self):
        """Load fallback "ip:port" strings from proxy_list.csv into self.addrs.

        Expects whitespace-separated lines whose first token is an IPv4
        address and second token a port; other lines are skipped.
        """
        addrs = []
        with open("proxy_list.csv", 'r') as f:
            lines = f.readlines()
        for line in lines:
            items = line.split()
            if len(items) < 2:
                continue
            # Crude IPv4 check: four dot-separated fields.
            if len(items[0].split('.')) == 4:
                addrs.append(items[0] + ":" + items[1])
        self.addrs = addrs

    def getProxy(self):
        """Return a random proxy, falling back to the CSV list when invalid.

        NOTE(review): the two return paths have different types — a proxy
        *object* from the scraped pool, but a plain "ip:port" *string* from
        self.addrs. Confirm which type callers expect before unifying.
        """
        # if(self.pointer+1 > self.total):
        #     self.renewProxies()
        #     self.pointer = 0
        rand_n = random.randint(0, self.total - 1)
        prox = self.proxies[rand_n]
        if self.isValid(prox) == False:
            rand_n = random.randint(0, len(self.addrs) - 1)
            return self.addrs[rand_n]
        # while self.isValid(prox) == False:
        #     rand_n = random.randint(0, self.total-1)
        #     prox = self.proxies[rand_n]
        #     print(prox.get_address())
        #     pass
        # self.pointer += 1

        return prox

    def isValid(self, prox):
        """Check *prox* liveness via ProxyChecker (network call per check)."""
        checker = ProxyChecker()
        return checker.check_proxy(prox.get_address())

    def getAddresses(self):
        """Return the "ip:port" strings of the scraped pool."""
        addrs = [x.get_address() for x in self.proxies]
        return addrs
Пример #9
0
def working_proxies(countries_list=None):
    """Scrape proxies and return a DataFrame filtered to selected countries.

    Args:
        countries_list: country names to keep; when None, a default set of
            Asian countries (hand-tested for success rate, per the original
            notes) is used.

    Returns:
        pandas DataFrame with columns 'countries' and 'IP', restricted to
        *countries_list*.
    """
    # Fresh scrape each call; the number of proxies varies run to run.
    fetcher = RequestProxy()
    fetched = fetcher.get_proxy_list()

    # Flatten the proxy objects into parallel address / country columns.
    addresses = [entry.get_address() for entry in fetched]
    country_names = [str(entry.country) for entry in fetched]

    df = pd.DataFrame({'countries': country_names, 'IP': addresses})

    if countries_list is None:
        # Defaults kept from the original hand-run success-rate survey
        # (e.g. Hong Kong 6/10, Thailand 10/20, Vietnam 6/11, Pakistan 7/14,
        # Macau 3/3); rejected candidates are omitted here for brevity.
        countries_list = [
            'Hong Kong',
            'Thailand',
            'Vietnam',
            'Pakistan',
            'Macau',
        ]

    # Keep only rows whose country is in the selection.
    df = df[df['countries'].isin(countries_list)]

    b = in_list(countries_list, df)
    print_results(b)

    return df
Пример #10
0
class RequestMaker:
    """Thin wrapper over RequestProxy that retries a GET across the pool."""

    def __init__(self):
        self.req_proxy = RequestProxy()

    def _generate_proxied_request(self, url, params=None):
        """Attempt the request once per pooled proxy; None if all fail."""
        if params is None:
            params = {}
        attempts = len(self.req_proxy.get_proxy_list())
        for _ in range(attempts):
            response = self.req_proxy.generate_proxied_request(url,
                                                               params=params)
            if response is not None:
                return response
        return None

    def get(self, url, params=None):
        """GET *url* through a proxy; raise RuntimeError when every proxy fails."""
        response = self._generate_proxied_request(url, params)
        if response is None:
            raise RuntimeError(
                'Failed to generate proxied request for {}'.format(url))

        return response
Пример #11
0
def Run(request):
    """Django view: on POST with 'param', bulk-register fake identities.

    Iterates stored e-mail addresses and submits registration forms on a
    hard-coded list of sites through randomly chosen proxies, driving a
    local chromedriver. Always renders Home/Home.html.

    Fix vs. original: ``random.randint(0, 20)`` is inclusive, so index 20
    overflowed ``Identities`` (length 20) with an IndexError roughly 1 in 21
    iterations; every identity pick now uses ``len(Identities) - 1``.
    """
    if request.method == 'POST':
        if request.POST.get('param'):

            from selenium import webdriver
            from selenium.webdriver.common.keys import Keys
            from selenium.common.exceptions import ElementClickInterceptedException
            from selenium.webdriver.common.action_chains import ActionChains
            from selenium.webdriver.common.by import By
            from selenium.webdriver.support.ui import WebDriverWait
            from selenium.webdriver.support import expected_conditions as EC
            import time
            from selenium.webdriver.support.ui import Select
            import random
            from http_request_randomizer.requests.proxy.requestProxy import RequestProxy
            from Home.models import Emails
            from selenium.webdriver.remote.webdriver import WebDriver

            # Local webdriver binary location.
            path = "C:\Program Files\chromedriver.exe"

            # Per-site lists: [url, form field names..., submit control].
            URLs = [
                [
                    "http://snappysurveys.net/", "email", "started",
                    "first_name", "last_name", "dob_month", "dob_day",
                    "dob_year", "addr_full", "addr_city",
                    "addr_state_province", "addr_zip", "addr_phone", "offers1",
                    "offers2", "offers3", "offers4", "offers5", "submitBtn"
                ],
                [
                    "https://youreducationfuture.com/", "firstname",
                    "lastname", "address", "city", "state", "email",
                    "areacode", "phone", "btn btn-large btn-primary"
                ],
                [
                    "https://www.nationalpayday.com/", "first_name", "email",
                    "amount", "option", "submit"
                ],
                [
                    "http://mycharityshopping.com/", "fname", "lname",
                    "exampleInputEmail1", "pwd1", "pwd2", "checkbox"
                ],
                [
                    "http://mortgageloans101.com/index.php/mortgage-quote-form/",
                    "wpforms[fields][9]", "wpforms[fields][10]",
                    "wpforms[fields][18]", "wpforms[fields][14]",
                    "wpforms[fields][7]", "wpforms[fields][15]",
                    "wpforms[fields][0][first]", "wpforms[fields][0][last]",
                    "wpforms[fields][3]", "wpforms[fields][2]",
                    "wpforms-107-field_12_1", "wpforms[submit]"
                ],
                [
                    "http://kidsdineforfree.com/", "fname", "lname", "email",
                    "pwd1", "pwd2", "newsletter", "frmaction"
                ],
                [
                    "http://emortgagefinders.com/", "input_5", "input_6",
                    "input_50", "input_8", "input_10", "input_51",
                    "input_12.3", "input_12.6", "input_14",
                    "gform_next_button_6_1"
                ],
                [
                    "http://consumerofferstore.com/", "fname", "lname",
                    "email", "contact", "state", "city", "country", "checkbox",
                    "checkbox1", "a-b3xqfy75bf3j", "Submit"
                ]
            ]

            # List of fake American identities:
            # [fname, lname, state, city, address, phone number, zip].
            Identities = [
                [
                    "Mary", "M.Pfister", "NewYork", "Huntington",
                    "4662 Duncan Avenue", "+1 610-934-1119", "11743"
                ],
                [
                    "Raymond", "M.Gamboa", "Kentucky", "Owensboro",
                    "4072 Coffman Alley", "+1 270-691-3671", "42301"
                ],
                [
                    "Pamela", "K.Smith", "Georgia", "Atlanta",
                    "1707 Musgrave Street", "+1 404-934-8171", "30303"
                ],
                [
                    "Nadine", "B.Lowe", "Arizona", "Superstition",
                    "423 East Avenue", "+1 480-358-3654", "85207"
                ],
                [
                    "Oscar", "L.Merrill", "Georgia", "Atlanta",
                    "411 Pine Garden Lane", "+1 770-741-7993", "30305"
                ],
                [
                    "Theresa", "K.Johnson", "Florida", "Sunrise",
                    "1116 Ridenour Street", "+1 786-306-3113", "33323"
                ],
                [
                    "Theodore", "J.Mejia", "Georgia", "Atlanta",
                    "2207 Edington Drive", "+1 678-799-9599", "30303"
                ],
                [
                    "Kenneth", "E.Pabon", "Maryland", "Sykesville",
                    "15 Woodhill Avenue", "+1 410-795-2288", "21784"
                ],
                [
                    "Juanita", "J.Perry", "Iowa", "Des Moines",
                    "4372 Southern Avenue", "+1 641-328-8365", "50309"
                ],
                [
                    "Pamela", "J.Chancellor", "Iowa", "Westside",
                    "2497 Centennial Farm Road", "+1 712-663-4676", "51467"
                ],
                [
                    "Mack", "P.King", "California", "Burbank",
                    "2181 Quiet Valley Lane", "+1 818-972-1068", "91502"
                ],
                [
                    "Chris", "M.Bibb", "Ohio", "Dayton", "1580 College Avenue",
                    "+1 937-434-9264", "45459"
                ],
                [
                    "Dorothy", "J.Honeycutt", "New Jersey", "Camden",
                    "939 Valley Street", "+1 856-885-6555", "08102"
                ],
                [
                    "Scott", "E.Brown", "California", "Bakersfield",
                    "179 Atha Drive", "+1 661-586-6085", "93304"
                ],
                [
                    "Barry", "L.Murchison", "Kentucky", "Pleasant Ridge",
                    "2210 Broaddus Avenue", "+1 270-275-3710", "40769"
                ],
                [
                    "Maye", "L.Moseley", "Michigan", "Grand Rapids",
                    "916 Goff Avenue", "+1 269-589-1746", "49503"
                ],
                [
                    "Jerry", "Y.Winn", "Tennessee", "Portland",
                    "422 Frum Street", "+1 615-325-8391", "37148"
                ],
                [
                    "Andrew", "N.Jones", "Ohio", "Cincinnati",
                    "2576 Goldie Lane", "+1 513-374-9889", "45214"
                ],
                [
                    "Timothy", "B.Frye", "California", "Sherman Oaks",
                    "3789 Par Drive", "+1 805-808-3371", "91403"
                ],
                [
                    "Kevin", "D.Carrillo", "Alabama", "Opelika",
                    "1774 Fleming Street", "+1 334-364-1184", "36801"
                ]
            ]

            # E-mail slots; entries are overwritten from the Emails model
            # below. NOTE(review): only the first count-1 slots get real
            # addresses; the remaining placeholders are still iterated —
            # confirm this is intended.
            emails = [
                1, 2, 3, 4, 5, 6, 7, 8, 9, 7, 8, 9, 7, 8, 9, 7, 8, 9, 7, 8, 9,
                7, 8, 9, 7, 8, 9
            ]

            # Proxy slots, filled from the scraped list below.
            PROXIES = [
                '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
                '0', '0', '0', '0', '0', '0', '0', '0'
            ]
            req_proxy = RequestProxy(
            )  # you may get a different number of proxies on each run
            proxies = req_proxy.get_proxy_list()  # this will create proxy list
            # NOTE(review): assumes at least 20 proxies were scraped.
            for i in range(20):
                PROXIES[i] = proxies[i].get_address()

            count = Emails.objects.all().count()
            emails[0] = Emails.objects.first().Email
            k = Emails.objects.first().pk
            k = k + 1
            # NOTE(review): Emails.objects.get(pk=k) raises DoesNotExist for
            # a missing pk rather than returning a falsy value, so the else
            # branch below is unreachable — confirm pks are contiguous.
            for j in range(1, count - 1):
                if Emails.objects.get(pk=k):

                    emails[j] = Emails.objects.get(pk=k).Email
                    k = k + 1
                else:
                    k = k + 1

            # Registration on the 1st website.
            for email in emails:
                proxy = random.choice(PROXIES)
                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: randint is inclusive; the original upper bound 20
                # indexed past the end of Identities.
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.maximize_window()
                browser.get(URLs[0][0])
                time.sleep(2)
                element = browser.find_element_by_name(URLs[0][1])
                element.send_keys(email)
                element = browser.find_element_by_class_name(URLs[0][2])
                element.click()
                # The form opens in a new window; switch to it.
                window_after = browser.window_handles[1]
                browser.switch_to.window(window_after)
                time.sleep(5)
                element = browser.find_element_by_name(URLs[0][3])
                element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[0][4])
                element.send_keys(Identities[i][1])
                browser.find_element_by_xpath(
                    "//select[@name='dob_month']/option[text()='December']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='dob_day']/option[text()='1']").click()
                browser.find_element_by_xpath(
                    "//select[@name='dob_year']/option[text()='2000']").click(
                    )
                element = browser.find_element_by_name(URLs[0][8])
                element.send_keys(Identities[i][4])
                element = browser.find_element_by_name(URLs[0][9])
                element.send_keys(Identities[i][3])
                element = browser.find_element_by_name(URLs[0][10])
                element.send_keys(Identities[i][2])
                element = browser.find_element_by_name(URLs[0][11])
                element.send_keys(Identities[i][6])
                element = browser.find_element_by_name(URLs[0][12])
                element.send_keys(Identities[i][5])
                element = browser.find_element_by_name(URLs[0][13])
                element.click()
                element = browser.find_element_by_name(URLs[0][14])
                element.click()
                element = browser.find_element_by_name(URLs[0][15])
                element.click()
                element = browser.find_element_by_name(URLs[0][16])
                element.click()
                browser.find_element_by_xpath(
                    "//input[@name='offers4' and @value=5]").click()
                element = browser.find_element_by_name(URLs[0][18])
                element.click()
                browser.close()

            # Registration on the 2nd website.
            for email in emails:
                proxy = random.choice(PROXIES)
                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: inclusive upper bound (see note in the first loop).
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.get(URLs[1][0])
                time.sleep(2)
                element = browser.find_element_by_name(URLs[1][1])
                element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[1][2])
                element.send_keys(Identities[i][1])
                element = browser.find_element_by_name(URLs[1][3])
                element.send_keys(Identities[i][4])
                element = browser.find_element_by_name(URLs[1][4])
                element.send_keys(Identities[i][3])
                element = browser.find_element_by_name(URLs[1][5])
                element.send_keys(Identities[i][2])
                element = browser.find_element_by_name(URLs[1][6])
                element.send_keys(email)
                element = browser.find_element_by_name(URLs[1][7])
                element.send_keys('907')
                element = browser.find_element_by_name(URLs[1][8])
                element.send_keys(Identities[i][5])
                element = browser.find_element_by_class_name(URLs[1][9])
                element.click()
                browser.close()

            # Registration on the 3rd website.
            for email in emails:
                proxy = random.choice(PROXIES)
                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: inclusive upper bound (see note in the first loop).
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.get(URLs[2][0])
                time.sleep(2)
                element = browser.find_element_by_name(URLs[2][1])
                element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[2][2])
                element.send_keys(email)
                browser.find_element_by_xpath(
                    "//select[@name='amount']/option[text()='$600']").click()
                element = browser.find_element_by_name(URLs[2][5])
                element.click()
                browser.close()

            # Registration on the 4th website.
            for email in emails:
                proxy = random.choice(PROXIES)

                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: inclusive upper bound (see note in the first loop).
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.get(URLs[3][0])
                time.sleep(2)
                element = browser.find_element_by_name(URLs[3][1])
                #element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[3][2])
                element.send_keys(Identities[i][1])
                element = browser.find_element_by_id(URLs[3][3])
                element.send_keys(email)
                element = browser.find_element_by_name(URLs[3][4])
                element.send_keys("password")
                element = browser.find_element_by_name(URLs[3][5])
                element.send_keys("password")
                element = browser.find_element_by_class_name(URLs[3][6])
                element.click()
                element = browser.find_element_by_xpath(
                    "//button[@type='submit' and @class='btn btn-orange']")
                element.click()
                browser.close()

            # Registration on the 5th website.
            for email in emails:
                proxy = random.choice(PROXIES)

                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: inclusive upper bound (see note in the first loop).
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.get(URLs[4][0])
                time.sleep(2)
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][9]']/option[text()='Refinance']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][10]']/option[text()='Condo']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][18]']/option[text()='Alabama']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][14]']/option[text()='$50,000 - $60,000']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][7]']/option[text()='Excellent (800+)']"
                ).click()
                browser.find_element_by_xpath(
                    "//select[@name='wpforms[fields][15]']/option[text()='Yes']"
                ).click()
                element = browser.find_element_by_name(URLs[4][7])
                element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[4][8])
                element.send_keys(Identities[i][1])
                element = browser.find_element_by_name(URLs[4][9])
                element.send_keys(Identities[i][5])
                element = browser.find_element_by_name(URLs[4][10])
                element.send_keys(email)
                element = browser.find_element_by_xpath(
                    "//input[@type='checkbox' and @id='wpforms-107-field_12_1']"
                )
                element.click()
                #element = browser.find_element_by_id(URLs[4][11])
                #element.click()
                element = browser.find_element_by_name(URLs[4][12])
                element.click()
                browser.close()

            # Registration on the 6th website.
            for email in emails:
                proxy = random.choice(PROXIES)

                webdriver.DesiredCapabilities.CHROME['proxy'] = {
                    "httpProxy": proxy,
                    "ftpProxy": proxy,
                    "sslProxy": proxy,
                    "proxyType": "MANUAL",
                }
                # Fix: inclusive upper bound (see note in the first loop).
                i = random.randint(0, len(Identities) - 1)
                browser = webdriver.Chrome(path)
                browser.get(URLs[5][0])
                time.sleep(2)
                element = browser.find_element_by_name(URLs[5][1])
                element.send_keys(Identities[i][0])
                element = browser.find_element_by_name(URLs[5][2])
                element.send_keys(Identities[i][1])
                element = browser.find_element_by_name(URLs[5][3])
                element.send_keys(email)
                element = browser.find_element_by_name(URLs[5][4])
                element.send_keys("password")
                element = browser.find_element_by_name(URLs[5][5])
                element.send_keys("password")
                element = browser.find_element_by_name(URLs[5][6])
                element.click()
                element = browser.find_element_by_name(URLs[5][7])
                element.click()
                browser.close()

        return render(request, 'Home/Home.html')
Пример #12
0
def loadProxies():
    """Return {"proxies": [address, ...]} freshly scraped from the randomizer."""
    fetched = RequestProxy().get_proxy_list()
    addresses = []
    # Collect the "host:port" string for every scraped proxy object.
    for entry in fetched:
        addresses.append(entry.get_address())
    return {"proxies": addresses}
Пример #13
0
def Init_Proxy():
    """Scrape a proxy list and return an endless cycle iterator over it."""
    return cycle(RequestProxy().get_proxy_list())
Пример #14
0
def randomProxy():
    """Return one randomly chosen proxy from a freshly scraped list.

    Bug fix: the original always returned ``myProxy[0]``, so despite the
    function's name every caller received the same (first) proxy.  We now
    pick uniformly at random.

    Returns:
        A single proxy object from the scraped list.

    Raises:
        IndexError: if the scrape returned no proxies (same failure mode
        as the original's ``[0]`` indexing on an empty list).
    """
    import random  # local import keeps this snippet self-contained

    listOfProxies = RequestProxy()
    myProxy = listOfProxies.get_proxy_list()
    return random.choice(myProxy)
Пример #15
0
# Results are written to disk after each chunk. Order is preserved.
# To reduce slowdowns due to disk IO, set this to a large-ish number
CHUNKSIZE = 200  # number of results buffered before each disk flush

# Min and max backoff after each thread's scrape, in seconds
BACKOFF_MIN = 1
BACKOFF_MAX = 5

####################################
# GLOBAL OBJECTS REQUIRED BY SCRAPER
####################################
# Module-level logger; setup_logger is a project helper defined elsewhere,
# so log_to_file/tqdm_support semantics are not visible from this file.
LOGGER = setup_logger(name=os.path.basename(__file__), log_to_file=True, level=logging.INFO, tqdm_support=True)
if USE_PROXY:
	# NOTE: RequestProxy() scrapes public proxy sites during construction,
	# so this import-time branch performs network I/O and can be slow.
	LOGGER.info("Initializing RequestProxy()")
	REQ_PROXY = RequestProxy()
	LOGGER.info("{0} proxies loaded.".format(len(REQ_PROXY.get_proxy_list())))
else:
	# Proxyless mode: only a randomized User-Agent is used per request.
	UA = UserAgent()


####################################
# BASE FUNCTIONS REQUIRED BY SCRAPER
####################################
def get_content_from_url(url):
	"""
		Scrapes the page in two modes:
			USE_PROXY=True: random proxies and user-agents
			USE_PROXY=False: random user-agents and sleep time

		Retries for connection problems including timeouts, up to NUM_ATTEMPTS
		Will not retry for HTTP errors (e.g. 404, 500)
Пример #16
0
# Per-platform paths to the bundled Tor binaries, rooted at ./tor.
TOR_FOLDER = os.path.join(os.getcwd(), 'tor')
# Ad-hoc enum: type('Enum', (), {...}) builds an anonymous class whose
# class attributes act as named constants (TOR_PATH.WINDOWS, TOR_PATH.MAC, ...).
TOR_PATH = type('Enum', (), {
    'WINDOWS': os.path.join(TOR_FOLDER, 'windows', 'tor.exe'),
    'MAC': os.path.join(TOR_FOLDER, 'mac', 'tor.real'),
    'LINUX': os.path.join(TOR_FOLDER, 'linux', 'tor'),
    'NONE': ''
})

# Shared, pre-constructed options objects for each supported browser.
# NOTE(review): these are module-level singletons — helpers that take them
# as defaults mutate the same instance on every call; confirm intended.
BROWSER_OPTIONS = type('Enum', (), {
    'CHROME': ChromeOptions(),
    'FIREFOX': FirefoxOptions()
})

# Scrape the free-proxy list once at import time (network I/O).
request_proxy = RequestProxy()
request_proxy.set_logger_level(40)  # 40 == logging.ERROR: silence info/debug noise
proxies = request_proxy.get_proxy_list()


def hidden(browser_options=BROWSER_OPTIONS.FIREFOX):
    """Configure browser options for a low-fingerprint ("hidden") session.

    Enables private/incognito mode and disables the webdriver automation
    markers that sites commonly sniff for.

    Fix: use ``isinstance`` instead of the ``type(x) == T`` anti-pattern,
    so subclasses of the options types are handled too.

    NOTE(review): the default argument is a shared module-level options
    object mutated in place, so repeated default calls accumulate
    arguments on the same instance — confirm callers expect that.

    Args:
        browser_options: a selenium ChromeOptions or FirefoxOptions
            instance (defaults to the shared Firefox options).

    Returns:
        The same options object, mutated in place.
    """
    if isinstance(browser_options, ChromeOptions):
        browser_options.add_argument('--incognito')
        browser_options.add_argument('--disable-blink-features=AutomationControlled')
    elif isinstance(browser_options, FirefoxOptions):
        browser_options.add_argument('--private')
        browser_options.set_preference("dom.webdriver.enabled", False)
        browser_options.set_preference('useAutomationExtension', False)
    return browser_options


def simplify(browser_options=BROWSER_OPTIONS.FIREFOX):
    if type(browser_options) == ChromeOptions:
Пример #17
0
def get_proxies():
    """Scrape and return the current list of free proxy objects.

    The list length varies from run to run, since it reflects whatever
    the public proxy sources are serving at the moment.
    """
    return RequestProxy().get_proxy_list()
Пример #18
0
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy
from webdriver_manager.chrome import ChromeDriverManager
import time
from time import sleep
import random

# How many accounts to create, read interactively from the operator.
accNo = int(input("No of Accounts: "))

print("Requesting Proxies:")

# Scrape the public proxy list once up front; RequestProxy() performs
# network I/O and the resulting count varies between runs.
req_proxy = RequestProxy()
proxies = req_proxy.get_proxy_list()
maxNum = len(proxies)
print("\n \n Max No of Proxies:" + str(maxNum) + "\n \n")

# Count of accounts created so far (incremented in the loop below).
i = 0

while accNo > i:
    k = random.randint(0, maxNum - 1)
    l = random.randint(0, maxNum - 1)
    PROXY = proxies[k].get_address()
    PROXY_PLACE = proxies[k].country
    first_name = (random.choice(open("Fnames.txt").read().split()))
    last_name = (random.choice(open("Lnames.txt").read().split()))
    full_name = (first_name + ' ' + last_name)
    username = (first_name + last_name + '.' + str(random.randint(1, 100)) +
                str(random.randint(1, 1000)))
        urllib.request.install_opener(opener)
        req = urllib.request.Request('http://www.google.com')
        urllib.request.urlopen(req)
    except urllib.error.HTTPError as error:
        print('Error code: ', error.code)
        return error.code
    except Exception as detail:
        print("ERROR:", detail)
        return True
    return False


# Blocking scheduler that drives the periodic parsing jobs.
SCHED = BlockingScheduler()

# Scrape the free-proxy list once at import time (network I/O; the number
# of proxies returned differs between runs).
REQ_PROXY = RequestProxy()
PROXIES = REQ_PROXY.get_proxy_list()
# First scheduled run is pushed three hours past process start.
NEXT_RUN = datetime.now() + timedelta(hours=3)

# Quiet selenium's verbose per-request logging.
selenium_logger.setLevel(logging.WARNING)


class Parser:
    """
    Main Parser class.
    """
    def __init__(self, use_proxy=False):
        """
        Parser initialisation by launching the browser.
        """
        self.browser = self.browser_setup(use_proxy=use_proxy)
        self.tg_sources_path = 'telegram_parsing/channels.txt'
Пример #20
0
def get_proxy():
    """Fetch a fresh proxy list and hand back a working entry.

    Liveness testing is delegated to the module's ``_get_working_proxy``
    helper, which receives the full scraped candidate list.
    """
    candidates = RequestProxy().get_proxy_list()
    return _get_working_proxy(candidates)
Пример #21
0
from apscheduler.schedulers.background import BackgroundScheduler
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy
from handlers import globalvars


def ipgrab():
    """Refresh the shared proxy list (invoked periodically by the scheduler).

    Re-scrapes via the module-level ``req_proxy`` and publishes the result
    through ``globalvars.req_proxy_list`` for other handlers to consume.
    """
    print("started extraction ip")
    fresh = req_proxy.get_proxy_list()
    globalvars.req_proxy_list = fresh
    print("done")

# Refresh the shared proxy list every `minutes` minutes.
minutes = 5
interval = 60 * minutes

print("loading")
# Initial synchronous load so globalvars.req_proxy_list is populated
# before the scheduler's first refresh fires (five minutes later).
req_proxy = RequestProxy()
globalvars.req_proxy_list = req_proxy.get_proxy_list()
print("done initial load")
sched = BackgroundScheduler()
sched.add_job(ipgrab, 'interval', seconds=interval)
sched.start()  # background scheduler: does not block this thread
print("made it here")
Пример #22
0
#!/usr/bin/env python2.7
import time
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy

if __name__ == '__main__':

    # Fix: the original used Python-2-only print statements (a syntax
    # error under Python 3); ported to print() calls, matching the
    # Python 3 variant of this demo used elsewhere in the file.

    # Time the initial scrape: RequestProxy() hits several public
    # proxy-list sites during construction.
    start = time.time()
    req_proxy = RequestProxy()
    print("Initialization took: {0} sec".format(time.time() - start))
    print("Size : {0}".format(len(req_proxy.get_proxy_list())))
    print(" ALL = {0}".format(req_proxy.get_proxy_list()))

    test_url = 'http://ipv4.icanhazip.com'  # echoes the caller's public IP

    # Endlessly probe the echo service through proxied requests; the
    # library prunes dead proxies, so the list size shrinks over time.
    while True:
        start = time.time()
        request = req_proxy.generate_proxied_request(test_url)
        print("Proxied Request Took: {0} sec => Status: {1}".format(
            time.time() - start, request.__str__()))
        if request is not None:
            print("\t Response: ip={0}".format(u''.join(
                request.text).encode('utf-8')))
        print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list())))

        print("-> Going to sleep..")
        time.sleep(1)
Пример #23
0
        "httpProxy":PROXY,
        "ftpProxy":PROXY,
        "sslProxy":PROXY,
        "proxyType":"MANUAL",
        'trustAllServers':'true',
        
    }
    browser = webdriver.Chrome(options=chrome_options)
    return browser

# Optional log-noise suppression (left disabled by the original author):
#logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
#from selenium.webdriver.remote.remote_connection import LOGGER
#LOGGER.setLevel(logging.WARNING)

# Scrape the public proxy list; the count differs from run to run.
req_proxy = RequestProxy()
proxies = req_proxy.get_proxy_list()
# Keep only proxies located in Spain — the original comment wrongly said
# "Indian proxy". Idiom fix: comprehension instead of a manual append loop.
sp = [proxy for proxy in proxies if proxy.country == 'Spain']
proxies = sp


browser_categories = ["popular", "blocks", "featured", "beta"]
data = get_all_popular_plugins()  # project helper defined elsewhere
json_data = json.dumps(data)

# Timestamp used further below to build the output .json filename.
current_date_and_time = datetime.datetime.now()
current_date_and_time_string = str(current_date_and_time)
extension = ".json"
Пример #24
0
import time
from http_request_randomizer.requests.proxy.requestProxy import RequestProxy

if __name__ == '__main__':

    # Time the initial scrape: RequestProxy() pulls from several public
    # proxy-list sites during construction.
    t0 = time.time()
    req_proxy = RequestProxy()
    elapsed = time.time() - t0
    print("Initialization took: {0} sec".format(elapsed))
    print("Size: {0}".format(len(req_proxy.get_proxy_list())))
    addresses = [entry.get_address() for entry in req_proxy.get_proxy_list()]
    print("ALL = {0} ".format(addresses))

    test_url = 'http://ipv4.icanhazip.com'  # echoes the caller's public IP

    # Endlessly probe the echo service through proxied requests; the
    # library prunes dead proxies, shrinking the list over time.
    while True:
        t0 = time.time()
        request = req_proxy.generate_proxied_request(test_url)
        print("Proxied Request Took: {0} sec => Status: {1}".format(time.time() - t0, str(request)))
        if request is not None:
            print("\t Response: ip={0}".format(request.text.encode('utf-8')))
        print("Proxy List Size: {0}".format(len(req_proxy.get_proxy_list())))

        print("-> Going to sleep..")
        time.sleep(10)