Example No. 1
0
def masshttp(*args):
    '''
   this function gathers up thousands of HTTP / HTTPS proxies from
   www.proxyserverlist24.top, www.dailyfreeproxy.com and proxy-daily.com.
   those proxies are not recommended to be used as reliable ones all the time;
   if you are willing to use them please check them first!!!
   the function takes an optional argument (*args) which is the number of
   proxies to return; in case no argument is given it returns the whole list.

   Returns:
       list of "ip:port" strings (may be empty if every source failed).

   usage:

   >>>import bane
   >>>bane.masshttp()

   >>>bane.masshttp(1500)
 '''
    # "ip:port" pattern shared by every scrape below (raw string for regex).
    ip_port_re = r"(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3}):(?:[\d]{1,5})"
    proxies = []

    # --- source 1: proxyserverlist24.top — collect candidate page links ---
    pages = []
    try:
        for tag in crawl("http://www.proxyserverlist24.top/#"):
            if ("proxy-server" in str(tag)) and ("#" not in str(tag)) and (tag not in pages):
                pages.append(tag)
    except Exception:
        pass  # best-effort: a dead source should not abort the whole gather
    for page in pages:
        try:
            # NOTE: fixed bug — the header dict was being passed positionally,
            # landing in requests.get's `params` argument instead of `headers`.
            r = requests.get(page, headers={'User-Agent': random.choice(ua)}, timeout=10)
            proxies += re.findall(ip_port_re, r.text)
        except Exception:
            pass

    # --- source 2: dailyfreeproxy.com — same idea, different link marker ---
    pages = []
    try:
        for tag in crawl("https://www.dailyfreeproxy.com/#"):
            tag = str(tag)
            if ("-http" in tag) and ("#" not in tag) and (tag not in pages):
                pages.append(tag)
    except Exception:
        pass
    for page in pages:
        try:
            r = requests.get(page, headers={'User-Agent': random.choice(ua)}, timeout=10)
            proxies += re.findall(ip_port_re, r.text)
        except Exception:
            pass

    # --- source 3: proxy-daily.com ---
    # NOTE: fixed bug — this block was indented inside the loop above, so it
    # re-downloaded and re-parsed proxy-daily once per dailyfreeproxy page.
    # It runs exactly once now. Only the first <div> that contains "ip:port"
    # matches is kept, mirroring the original `t += p[0]` behaviour.
    try:
        html = requests.get("http://proxy-daily.com/#",
                            headers={'User-Agent': random.choice(ua)},
                            timeout=10).text
        for div in BeautifulSoup(html, 'html.parser').findAll('div'):
            found = re.findall(ip_port_re, str(div))
            if found:
                proxies += found
                break
    except Exception:
        pass

    if not args:
        return proxies
    # random.sample avoids the duplicates and the IndexError-on-empty-list
    # that the old random.choice loop had; min() caps the request at what
    # was actually gathered.
    return random.sample(proxies, min(args[0], len(proxies)))
Example No. 2
0
def massocks5(*args):
    '''
   this function gathers up thousands of SOCKS5 proxies from
   www.dailyfreeproxy.com and www.live-socks.net.
   those proxies are not recommended to be used as reliable ones all the time;
   if you are willing to use them please check them first!!!
   the function takes an optional argument (*args) which is the number of
   proxies to return; in case no argument is given it returns the whole list.

   Returns:
       list of "ip:port" strings (may be empty if every source failed).

   usage:

   >>>import bane
   >>>bane.massocks5()

   >>>bane.massocks5(500)
 '''
    # "ip:port" pattern shared by both scrapes (raw string for regex).
    ip_port_re = r"(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3}):(?:[\d]{1,5})"
    proxies = []

    # --- source 1: dailyfreeproxy.com — collect "-socks5" page links ---
    pages = []
    try:
        for tag in crawl("https://www.dailyfreeproxy.com/#"):
            tag = str(tag)
            if ("-socks5" in tag) and ("#" not in tag) and (tag not in pages):
                pages.append(tag)
    except Exception:
        pass  # best-effort: a dead source should not abort the whole gather
    for page in pages:
        try:
            # NOTE: fixed bug — the header dict was being passed positionally,
            # landing in requests.get's `params` argument instead of `headers`.
            r = requests.get(page, headers={'User-Agent': random.choice(ua)}, timeout=10)
            proxies += re.findall(ip_port_re, r.text)
        except Exception:
            pass

    # --- source 2: live-socks.net — pull "socks-5" post links from h3 titles ---
    pages = []
    try:
        html = requests.get("http://www.live-socks.net/#",
                            headers={'User-Agent': random.choice(ua)},
                            timeout=10).text
        soup = BeautifulSoup(html, 'html.parser')
        for title in soup.find_all('h3', class_='post-title entry-title'):
            for link in title.find_all('a'):
                link = str(link)
                if "socks-5" in link:
                    # extract the href value from the raw anchor markup
                    url = link.split('href="')[1].split('"')[0]
                    if url not in pages:
                        pages.append(url)
    except Exception:
        pass
    for page in pages:
        try:
            r = requests.get(page, headers={'User-Agent': random.choice(ua)}, timeout=10)
            # this source deduplicates against everything gathered so far
            # (matching the original's second-loop behaviour)
            for ip in re.findall(ip_port_re, r.text):
                if ip not in proxies:
                    proxies.append(ip)
        except Exception:
            pass

    if not args:
        return proxies
    # random.sample avoids the duplicates and the IndexError-on-empty-list
    # that the old random.choice loop had; min() caps the request at what
    # was actually gathered.
    return random.sample(proxies, min(args[0], len(proxies)))