# Example 1 (score: 0)
class CleanProxies(object):
    """Mixin that routes reputation lookups through rotating proxy
    identities so repeated requests are not rate limited (HTTP 429)."""

    def __init__(self):
        self.proxy = None        # current requests-style proxy dict, or None
        self.IDs = Queue()       # pool of scraped proxy records
        self.isAlive = True      # cleared externally to stop all loops
        self.requestsMade = 0    # lookups made through the current identity
        self.msgDisplay = False  # one-shot flag for the status message

    def isClean(self, ip, fails=3):
        """Return True when the reputation service flags none of the
        suspicious factors for `ip`; retry up to `fails` times on error.

        NOTE(review): `self.ip_checker` is only defined by a subclass
        (see Scrape) -- this mixin cannot be used stand-alone; confirm.
        """
        # rotate identity when the quota is used up or none is set yet
        if any([self.requestsMade == MAX_REQUESTS, not self.proxy]):
            self.changeID()
        try:
            if not self.isAlive: return
            self.requestsMade += 1
            stat = get(self.ip_checker + ip,
                       proxies=self.proxy).json()['suspicious_factors']
            return not all([
                stat['is_proxy'], stat['is_suspicious'], stat['is_tor_node'],
                stat['is_spam']
            ])
        except Exception:
            # bug fix: the retry's verdict used to be discarded (no
            # `return`), so every retried lookup read as "not clean"
            if fails: return self.isClean(ip, fails - 1)
            else: self.changeID()

    def changeID(self):
        """Swap in a fresh proxy identity to bypass 429 throttling."""
        if not self.isAlive: return
        self.requestsMade = 0
        if not self.msgDisplay:
            print('[!] Searching for secure proxies ...')
            self.msgDisplay = True

        # NOTE(review): `qsize` is read as an attribute, not called --
        # presumably a project Queue exposes it as a property (stdlib
        # Queue.qsize() is a method and would always be truthy); confirm.
        if not self.IDs.qsize: self.scrapeProxies()

        proxy = self.IDs.get()
        try:
            self.proxy = {
                'https': 'http://{}:{}'.format(proxy['ip'], proxy['port'])
            }
        except Exception:
            pass  # best effort: keep the previous identity on a bad record

    def scrapeProxies(self):
        """Refill the identity pool from a fresh SSL proxy scrape."""
        scraper = lib.proxyScraper.Scrape(maxSize=10, protocol='SSL')
        scraper.scrape()
        while all([self.isAlive, scraper.proxies.qsize]):
            self.IDs.put(scraper.proxies.get())
# Example 2 (score: 0)
class Views(Browser):
    """Generates views for a list of target URLs by visiting each one
    through rotating proxy IPs with a small pool of bot threads."""

    def __init__(self, urllist, visits, min, max):
        """Load targets from `urllist` (one URL per line) and configure
        the run: `visits` per URL, `min`/`max` watch-time bounds.

        NOTE(review): Browser.__init__ is never invoked -- confirm the
        base class needs no initialisation of its own.
        """
        self.bots = 5               # max amount of bots to use
        self.count = 0              # bots currently out visiting
        self.ip = None              # current proxy record, e.g. {'ip': ...}
        self.alive = True           # master kill switch
        self.targets = {}           # {url: visits}
        self.ip_usage = 0           # visits made through the current IP
        self.ip_fails = 0           # failures on the current IP
        self.max_fails = 3
        self.max_usage = 3
        self.proto = 'https'
        self.recentIPs = Queue(15)  # short memory of IPs to avoid reuse
        self.requesting_ip = False  # guard against concurrent IP requests

        # `min`/`max` shadow the builtins but are kept so the public
        # signature stays unchanged for existing callers
        self.min = int(min)
        self.max = int(max)
        self.visits = int(visits)

        if not path.exists(urllist):
            exit('Error: Unable to locate `{}`'.format(urllist))

        # read the url list: one target per non-empty line
        with open(urllist, 'r') as f:
            try:
                for url in [_ for _ in f.read().split('\n') if _]:
                    self.targets[url] = 0 # initial view
            except Exception as err:
                # bug fix: exit() accepts a single status argument; the
                # old two-argument call `exit('Error:', err)` raised a
                # TypeError itself instead of exiting cleanly
                exit('Error: {}'.format(err))

    def display(self, url):
        """Redraw the status banner for `url`; exits once the run dies."""
        n = '\033[0m'  # null ---> reset
        r = '\033[31m' # red
        g = '\033[32m' # green
        y = '\033[33m' # yellow
        b = '\033[34m' # blue

        call([cls])  # clear the terminal (`cls` is a module-level command)
        print('')
        print('  +------ Youtube Views ------+')
        print('  [-] Url: {}{}{}'.format(g, url, n))
        print('  [-] Proxy IP: {}{}{}'.format(b, self.ip['ip'], n))
        print('  [-] Visits: {}{}{}'.format(y, self.targets[url], n))
        if not self.alive:self.exit()

    def visit(self, url):
        """Bot worker: watch `url` once and record the view on success."""
        try:
            if self.watch(url):
                views = self.targets[url]
                self.targets[url] = views + 1
        except Exception:
            pass  # best effort: a failed watch simply earns no view
        finally:
            try:
                sleep(1)
                self.count -= 1  # hand the bot slot back
            except Exception:
                pass

    def connection(self):
        """Verify internet connectivity (3 attempts); exit when offline."""
        connected = False
        for _ in range(3):
            try:
                if not self.alive:self.exit()
                urlopen('https://example.com')
                connected = True
                break
            except Exception:
                # narrowed from a bare `except:` that also swallowed the
                # SystemExit raised by self.exit() above
                pass
        if not connected:
            print('Error: No Connection!')
            self.exit()

    def change_ip(self, ip):
        """Adopt `ip` unless it was used recently; with no `ip` at all,
        just confirm connectivity is still up."""
        if not ip:
            self.connection()
            return
        else:
            if ip not in self.recentIPs.queue:
                self.set_ip(ip)

    def updateIp(self):
        """Fetch a fresh proxy IP; no-ops while another request runs."""
        if not self.alive:return
        if self.requesting_ip:return
        self.requesting_ip = True
        self.change_ip(IP(self.proto).get_ip())
        self.requesting_ip = False

    def set_ip(self, ip):
        """Install `ip` as the active proxy and reset its counters."""
        self.ip = ip
        self.ip_usage = 0
        self.ip_fails = 0
        self.recentIPs.put(ip)

    def exit(self):
        """Mark the run dead and terminate the process."""
        self.alive = False
        exit()

    def run(self):
        """Main loop: rotate IPs, dispatch bot threads, report progress."""
        ndex = 0  # round-robin index into the target urls
        while all([self.alive, len(self.targets)]):
            try:
                urls = [] # tmp list of the urls that are being visited
                # rotate the IP when missing, failing, or over-used
                if any([not self.ip, self.ip_fails >= self.max_fails, self.ip_usage >= self.max_usage]):
                    self.updateIp()
                    if not self.ip:
                        call([cls])
                        print('Working on obtaining a clean IP ...')
                    sleep(5)
                    continue
            except KeyboardInterrupt:self.exit()

            for _ in range(self.bots):
                try:
                    url = [_ for _ in self.targets][ndex]
                except IndexError:return
                except KeyboardInterrupt:self.exit()

                view = self.targets[url]
                if view >= self.visits:
                    del self.targets[url]  # quota reached: retire this url
                    continue

                # skip duplicates within this round and unusable IPs
                if any([url in urls, not self.ip, self.ip_fails >= self.max_fails]):continue
                ndex = ndex+1 if ndex < len(self.targets)-1 else 0
                Thread(target=self.visit, args=[url]).start()

                urls.append(url)
                self.count += 1
                self.ip_usage += 1
                # deliberately bare: Ctrl-C during the sleep must exit
                try:sleep(1)
                except:self.exit()

            # wait for every bot of this round to come back
            while all([self.count, self.alive]):
                for url in urls:
                    try:
                        self.display(url)
                        if not self.alive:self.exit()
                        if self.ip_fails >= self.max_fails:
                            self.count = 0  # abandon the round
                        [sleep(1) for _ in range(7) if all([self.count, self.alive])]
                    except KeyboardInterrupt:self.exit()
                    except Exception:
                        # bug fix: the old bare `except:` also swallowed
                        # the SystemExit raised by self.exit() above, so
                        # the process could not terminate from this loop
                        pass
# Example 3 (score: 0)
class Viewer(object):
    """Plays a target video repeatedly through proxied headless-Chrome
    sessions until the requested number of views is reached."""

    def __init__(self, url, views, visits=0):
        self.recentProxies = Queue()  # every proxy ever handed out
        self.proxies = Queue()        # proxies ready for use
        self.renewDriver = True       # request a fresh browser/proxy pair
        self.isActive = True          # True while a watch() is in flight
        self.isAlive = True           # master kill switch
        self.views = views            # target view count
        self.visits = visits          # views delivered so far
        self.url = url
        self.scraper = Scrape(maxSize=30, protocol='SSL', cleanProxies=True)

    def proxiesManager(self):
        """Background loop keeping `self.proxies` stocked with fresh,
        never-before-seen proxies until the run dies.

        NOTE(review): `qsize` is read as an attribute throughout --
        presumably a project Queue property (stdlib qsize() is a
        method); confirm against the actual import.
        """
        while self.isAlive:
            while all([self.isAlive, self.proxies.qsize]):
                [
                    sleep(1) for _ in range(10) if self.isAlive
                    if self.proxies.qsize
                ]
                self.collect()
            if self.isAlive:
                Thread(target=self.scraper.scrape).start()
                while all([self.isAlive, self.scraper.proxies.qsize < 3]):
                    pass  # busy-wait until the scraper has a few proxies
            self.collect()
        self.scraper.isAlive = False

    def collect(self):
        """Drain the scraper's queue, keeping only unseen proxies."""
        while all([self.isAlive, self.scraper.proxies.qsize]):
            proxy = self.scraper.proxies.get()
            if not self.recentProxies.inQueue(proxy):
                self.recentProxies.put(proxy)
                self.proxies.put(proxy)

    def kill(self):
        """Stop every loop owned by this instance."""
        self.isAlive = False

    def watch(self, proxy, driver):
        """Worker: load the video once through `driver`, counting the
        visit; on any browser/proxy error flag the driver for renewal.

        NOTE(review): after driver.quit() in the except clauses the code
        keeps using `driver`; the later calls then fail and are caught
        again, but confirm that is the intended flow. Also, the early
        `return` below leaves isActive True -- start() relies on the
        outer kill switch in that case.
        """
        print('\n[!] Proxy-IP: {}\n[-] Country: {}\n[+] Views: {}\n'.format(
            proxy['ip'], proxy['country'], self.visits + 1))
        if not self.isAlive: return

        try:
            driver.get(self.url + '&t=5')
        except Exception:
            self.renewDriver = True
            driver.quit()

        try:
            html = driver.page_source.encode('utf-8')
            if any([
                    'ERR_PROXY_CONNECTION_FAILED' in html,
                    'ERR_TUNNEL_CONNECTION_FAILED' in html,
                    'ERR_EMPTY_RESPONSE' in html
            ]):
                self.renewDriver = True
                driver.quit()
        except Exception:
            self.renewDriver = True
            driver.quit()

        sleep(3)
        self.isActive = False  # releases the wait loop in start()
        if self.renewDriver: driver.quit()
        else: self.visits += 1

    def driver(self, proxy):
        """Build a headless Chrome instance routed through `proxy`."""
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--mute-audio')
        chrome_options.add_argument('--log-level=3')
        chrome_options.add_argument('--disable-gpu')
        chrome_options.add_argument('user-agent={}'.format(
            choice(USER_AGENTS)))
        chrome_options.add_argument('--proxy-server=http://{}:{}'.format(
            proxy['ip'], proxy['port']))
        return webdriver.Chrome(executable_path=DRIVER_PATH,
                                chrome_options=chrome_options)

    def _renew(self, driver, driverUsage):
        """Quit the current browser and, when a proxy is available,
        build a fresh proxied one. Returns (proxy, driver, driverUsage).

        Extracted from start(), which duplicated this block verbatim.
        """
        proxy = None
        if driver: driver.quit()
        if self.proxies.qsize:
            driverUsage = 0
            self.renewDriver = False
            proxy = self.proxies.get()
            driver = self.driver(proxy)
        return proxy, driver, driverUsage

    def start(self):
        """Main loop: pair proxies with drivers and dispatch watch()
        workers until `self.views` visits have been delivered."""
        proxy = None
        driver = None
        driverUsage = 0  # watches performed by the current driver
        # (an unused local `renewDriver = True` was removed here)
        Thread(target=self.proxiesManager).start()

        while all([self.visits < self.views, self.isAlive]):
            try:
                if driverUsage == 10:
                    self.renewDriver = True  # retire long-lived drivers

                if any([not self.isAlive, self.renewDriver]):
                    proxy, driver, driverUsage = self._renew(driver, driverUsage)

                if all([self.proxies.qsize, proxy]):
                    self.isActive = True
                    if not proxy:
                        proxy = self.proxies.get()

                    Thread(target=self.watch, args=[proxy, driver]).start()

                    # wait until the worker clears isActive
                    while self.isActive:
                        try:
                            sleep(0.5)
                            self.removeDebug()
                        except KeyboardInterrupt:
                            self.isAlive = False

                    driverUsage += 1
                    if any([not self.isAlive, self.renewDriver]):
                        proxy, driver, driverUsage = self._renew(driver, driverUsage)

            except KeyboardInterrupt:
                self.isAlive = False

        if driver: driver.quit()
        self.isAlive = False
        self.removeDebug()
        if self.visits == self.views:
            self.visits = 0  # reset so the instance can be reused

    def removeDebug(self):
        """Delete chromedriver's debug log if present."""
        if path.exists(DEBUG_LOG):
            remove(DEBUG_LOG)
# Example 4 (score: 0)
class Scrape(CleanProxies):
    """Scrapes free proxy lists (sslproxies.org / socks-proxy.net) and
    optionally filters them to reputation-checked "clean" IPs."""

    def __init__(self,
                 port=None,
                 protocol=None,
                 country=None,
                 maxSize=None,
                 cleanProxies=False):
        """Configure optional filters; any of port / protocol / country
        left as None is not applied. `maxSize` caps the queue size and
        `cleanProxies` enables the CleanProxies reputation check."""
        self.socks_proxies = 'https://socks-proxy.net'
        self.ssl_proxies = 'https://sslproxies.org'
        self.ip_checker = 'https://ip-api.io/json/'  # used by CleanProxies.isClean
        self.port = str(port) if port else None      # ports compare as strings
        self.cleanIP = cleanProxies
        self.protocol = protocol
        self.maxSize = maxSize
        self.country = country
        self.proxies = Queue()
        self.isAlive = True

        super(Scrape, self).__init__()

    def parse(self, proxy, ssl=False):
        """Turn one table row's cells into a proxy record and return it
        only when it matches every configured filter; otherwise None.

        `proxy` is the list of <td> tags of one row; SSL pages lack the
        protocol column, hence the shifted indices.
        """
        detail = {
            'ip': proxy[0].string,
            'port': proxy[1].string,
            'protocol': 'SSL' if ssl else proxy[4].string,
            'anonymity': proxy[4 if ssl else 5].string,
            'country': proxy[3].string,
            'updated': proxy[7].string,
            'https': proxy[6].string
        }

        # A record passes when every *configured* filter matches. These
        # guards replace the old eight-branch ladder that enumerated
        # each filter combination by hand; semantics are identical.
        if self.protocol and detail['protocol'].lower() != self.protocol.lower():
            return None
        if self.country and detail['country'].lower() != self.country.lower():
            return None
        if self.port and detail['port'] != self.port:
            return None
        return detail

    def fetch(self, url, ssl=False):
        """Download one proxy-list page and queue every matching row,
        honouring `maxSize` and the optional clean-IP check."""
        try:
            proxies = bs(get(url).text,
                         'html.parser').find('tbody').findAll('tr')
        except Exception:
            return  # network error or unexpected page layout: skip source

        for proxy in proxies:
            if not self.isAlive:
                break
            data = self.parse(proxy.findAll('td'), ssl)
            if not data:
                continue
            # NOTE(review): `qsize` read as an attribute -- presumably a
            # project Queue property (stdlib qsize() is a method); confirm.
            if self.maxSize and not self.proxies.qsize < self.maxSize:
                break
            if self.cleanIP and not self.isClean(data['ip']):
                continue
            self.proxies.put(data)

    def scrape(self, fails=3):
        """Scrape both sources. `fails` is unused but kept so existing
        callers passing it keep working."""
        self.fetch(self.ssl_proxies, True)
        self.fetch(self.socks_proxies)


## Example I
# n = Scrape()
# n.scrape()
# print n.proxies

## Example II, by port
# n = Scrape(port=1080)
# n.scrape()
# print n.proxies

## Example III, by size
# n = Scrape(maxSize=10)
# n.scrape()
# print n.proxies

## Example IV, by port and maxSize
# n = Scrape(port=1080, maxSize=3)
# n.scrape()
# print n.proxies

## Example V, by port, maxSize, clean
# n = Scrape(port=1080, maxSize=3, cleanProxies=True)
# n.scrape()
# print n.proxies