Beispiel #1
0
    def rec_ret(self, file, filename):
        """Recursively search the ring for *filename*.

        Returns this node's address if the file is stored locally;
        otherwise forwards the query to the successor over RPC.
        Returns None when there is no successor or when the search
        wraps all the way around back to the root (file not in ring).
        Python 2 code (print statements).
        """
        print "Looking for file %s" % filename

        # if we have the file
        if self.checkfile(file):
            print("Found!\n")
            return self.address
        # if we don't have the file
        else:
            print "I don't have file %s" % filename
            # if we have a successor, forward the search to our successor
            if self.successor:
                # The root forwards unconditionally; only non-root nodes
                # perform the full-circle termination check below.
                if self.is_root == 1:
                    print "Forwarding the search to %s...\n" % self.successor.hostname
                    suc = Proxy(self.successor.ip_addr, self.successor.port)
                    return suc.recursive(file, filename)
                else:
                    # if the search makes one full circle and comes back to the root, it means file does not exist
                    if self.successor.ip_addr == self.root_addr.ip_addr and self.successor.port == self.root_addr.port:
                        print "There is no %s in the ring...\n" % filename
                        return None
                    else:
                        print "Forwarding the search to %s...\n" % self.successor.hostname
                        suc = Proxy(self.successor.ip_addr,
                                    self.successor.port)
                        return suc.recursive(file, filename)
            # if we don't have a successor it means the file is not in the ring
            else:
                print "There is no %s in the ring...\n" % filename
                return None
Beispiel #2
0
    def join_network(self):
        """Join the ring (or bootstrap it if we are the root).

        Starts the event-listener thread, wires up successor/predecessor
        pointers via RPC for non-root nodes, inherits files from the new
        successor, then starts the stabilizing thread and blocks forever
        so the daemon threads stay alive.  Never returns.
        """

        # start event listener thread
        t = threading.Thread(target=self.start)
        t.daemon = True
        t.start()

        # if we are root
        if self.is_root == 1:
            # Root starts alone: all ring pointers are empty until peers join.
            self.root = None
            self.successor = None
            self.sucsuccessor = None
            self.predecessor = None
            self.sec_successor = None
            self.toString()

        # if we are peer
        else:
            self.predecessor = None
            self.sucsuccessor = None

            # contact the root to find our successor
            self.root = Proxy(self.root_addr.ip_addr, self.root_addr.port)
            self.successor = self.root.find_successor(self.address.NODEID)

            # set our predecessor
            succ = Proxy(self.successor.ip_addr, self.successor.port)
            tmp = succ.getpredec()
            if not tmp:
                # Successor has no predecessor yet (two-node ring case):
                # just announce ourselves to it.
                succ.notify(self.address)
            else:
                # Splice ourselves between the successor and its old
                # predecessor, updating both neighbours over RPC.
                pred = Proxy(tmp.ip_addr, tmp.port)
                pred.revnotify(self.address)
                pred.revnotify2(self.successor)
                self.predecessor = tmp
                succ.notify(self.address)

            # get successor's successor
            self.sucsuccessor = succ.getsucc()

            # inherit necessary files from successor
            self.inherit()

            # print current state of the node
            self.toString()

        # start stabilizing thread
        s = threading.Thread(target=self.periodical)
        s.daemon = True
        s.start()

        # don't kill the main thread so that deamon threads can survive
        while True:
            time.sleep(1)
Beispiel #3
0
def get_proxy_from_free_proxy():
    """Scrape https://free-proxy-list.net/ and return plain-HTTP proxies.

    Returns:
        list[Proxy]: deduplicated proxies built from the site's table,
        keeping only rows whose Https column is 'no'.
    """
    url = 'https://free-proxy-list.net/'
    header = {
        'User-Agent':
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
    }
    # Fixed: dropped the redundant chained alias (`page = request = ...`).
    page = requests.get(url, headers=header)
    soup = BeautifulSoup(page.text, 'html.parser')
    table = soup.find('table')
    tbody = table.find('tbody')
    rows = tbody.find_all('tr')
    proxyset = set()
    for row in rows:
        cols = row.find_all('td')
        # Column layout assumed from the site's table — TODO confirm:
        # 0=IP, 1=Port, 3=Country, 4=Anonymity, 6=Https.
        # (Unused columns 2 and 7 removed; the old `time` local also
        # shadowed the stdlib time module.)
        ip = cols[0].text
        port = cols[1].text
        country = cols[3].text
        anon = cols[4].text
        https = cols[6].text
        if https == 'no':
            proxy = Proxy(ip, port, anon, country,
                          '')  # ip, port, anon, country, iso
            proxyset.add(proxy)
    return list(proxyset)
def run_tests(links, testLevel=1, v=False):
    """Scan each link for SQL injection and (optionally) XSS.

    Args:
        links: list of URLs to probe.
        testLevel: 1 = error-based SQLi only; 2 = adds time-based SQLi;
            3 = adds XSS.
        v: verbose output.

    Returns:
        list of TestObject for every finding, or None when *links* is
        empty/None.
    """
    if not links:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return
    # Route probes through a validated proxy.
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    TestObjects = []
    for link in links:
        sqlI = True  # Used to check if we need to perform a second time-based sqlInjection test
        res = sql_error_scan(link, v)
        if res is not None:
            TestObjects.append(TestObject.TestObject(link, "SQLi", res))
            sqlI = False
        # Time based SQLi (skipped when error-based already hit)
        if testLevel > 1 and sqlI:
            res = sql_time_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if TestObjects:
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in TestObjects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return TestObjects
Beispiel #5
0
def get_proxy_list():
    """Download proxies from proxy-list.download's API.

    Returns:
        list[Proxy]: deduplicated proxies built from the 'LISTA' field
        of the API payload.  Exits the process if the download fails.
    """
    try:
        url = 'https://www.proxy-list.download/api/v0/get?l=en&t=http'
        request = requests.get(url)
        json_lst = request.json()
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        print('Não foi possível baixar lista de proxies...')
        exit()

    # Only LISTA is consumed; UPDATED/UPDATEDAV/TOTAL/PAISES metadata
    # fields from the payload are intentionally ignored.
    DICT = json_lst[0]
    LISTA = DICT.get('LISTA')

    # Build Proxy objects; a set deduplicates them.
    proxyset = set()
    for server in LISTA:
        proxy = Proxy(server.get('IP'), server.get('PORT'), server.get('ANON'),
                      server.get('COUNTRY'), server.get('ISO'))
        proxyset.add(proxy)
    return list(proxyset)
Beispiel #6
0
 def notify(self, addr):
     """Record *addr* as our predecessor.

     If we have no successor yet (two-node ring bootstrap), *addr*
     also becomes our successor and we announce ourselves back to it.
     """
     self.predecessor = addr
     if not self.successor:
         # Close the two-node ring: the joiner is both neighbours.
         self.successor = addr
         joiner = Proxy(addr.ip_addr, addr.port)
         joiner.notify(self.address)
         self.sucsuccessor = joiner.getsucc()
     self.toString()
Beispiel #7
0
 def deep_iter_proxy(arg):
     """Recursively collect Proxy objects from *arg* into the ``proxys``
     set (presumably defined in an enclosing scope — TODO confirm).

     Strings are parsed into Proxy instances (unparsable ones are
     skipped); any other iterable is walked element by element.
     Python 2 code (uses ``basestring``).
     """
     if isinstance(arg, Proxy):
         proxys.add(arg)
     elif isinstance(arg, basestring):
         try:
             proxy = Proxy(arg)
             proxys.add(proxy)
         except ProxyException:
             # Invalid proxy strings are silently ignored by design.
             pass
     elif isinstance(arg, collections.Iterable):
         for item in arg:
             deep_iter_proxy(item)
Beispiel #8
0
def get_list_enable_proxy():
    """List active NAT redirect rules from iptables as Proxy objects."""
    raw = subprocess.check_output(
        ['iptables', '-t', 'nat', '-L', '--line-numbers', '-n'])
    enabled = []
    for rule_line in raw.decode('utf-8').split('\n'):
        # Only destination-port ('dpt') rules describe proxied ports.
        if 'dpt' not in rule_line:
            continue
        # search_ip_port() reduces the rule to a colon-separated string;
        # exact field layout assumed from the indexing — TODO confirm.
        fields = search_ip_port(rule_line).split(':')
        enabled.append(Proxy(fields[1], fields[2], int(fields[0][:-3])))
    return enabled
Beispiel #9
0
    def find_success(self, id):
        """Find the node responsible for key *id*.

        Returns our own address when we are the only node in the ring,
        our successor when *id* falls between our NODEID and the
        successor's (per self.inbetween), and otherwise forwards the
        query to the successor over RPC.

        NOTE(review): the remote call is named ``find_successor`` while
        this method is ``find_success`` — confirm the asymmetry is the
        intended RPC method name.  The parameter ``id`` shadows the
        builtin but cannot be renamed without breaking keyword callers.
        """

        # if we are the only node in the ring
        if self.is_root == 1 and not self.successor:
            return self.address
        else:
            # if we are the successor
            if self.inbetween(id, self.address.NODEID, self.successor.NODEID):
                return self.successor
            # else ask our successor
            else:
                suc = Proxy(self.successor.ip_addr, self.successor.port)
                return suc.find_successor(id)
Beispiel #10
0
 def periodical(self):
     """Stabilisation loop: every 3 seconds ping the successor and
     repair the ring when it has failed.  Runs forever (daemon thread).
     """
     while 1:
         # check every 3 seconds
         time.sleep(3)
         if self.successor:
             if not self.ping(self.successor):
                 # fix the broken segment by reassigning pointers
                 print("%s failed: Stabilizing...\n" %
                       self.successor.hostname)
                 # Two-node ring: the dead successor was also our
                 # predecessor, so fall back to a single-node state.
                 if self.successor.NODEID == self.predecessor.NODEID:
                     self.reset()
                     self.toString()
                 else:
                     # Skip over the dead node to its successor and
                     # re-link both neighbours around the gap via RPC.
                     self.successor = self.sucsuccessor
                     sucsuc = Proxy(self.sucsuccessor.ip_addr,
                                    self.sucsuccessor.port)
                     self.sucsuccessor = sucsuc.getsucc()
                     sucsuc.notify(self.address)
                     pred = Proxy(self.predecessor.ip_addr,
                                  self.predecessor.port)
                     pred.revnotify2(self.successor)
                     self.toString()
Beispiel #11
0
 def __init__(self, province_name, log=None):
     """Initialise the crawler: pick a random User-Agent, open a
     requests Session and set up the proxy pool for *province_name*.

     :param province_name: province key; lower-cased before use
     :param log: optional logger
     :return:
     """
     self.ua = random.choice(user_agent)
     self.ss = Session()
     self.pro_name = province_name.lower()
     self.proxy_c = Proxy(self.pro_name)
     self.proxyInit()
     # Counters for successful / failed HTTP requests.
     self.correct_http = 0
     self.error_http = 0
     self.log = log
     # NOTE(review): self.proxySet is presumably created by proxyInit()
     # — confirm; otherwise this line raises AttributeError.
     self.proxy = self.proxySet
Beispiel #12
0
    def inherit(self):
        """Take over files from our successor whose IDs fall between our
        predecessor's NODEID and our own (boundary semantics defined by
        self.inbetween).  Each such file is appended to our index and
        removed from the successor over RPC.
        """
        suc = Proxy(self.successor.ip_addr, self.successor.port)
        s_list = suc.getindexfile()

        # for every item in our successor
        for i in s_list:
            # if it falls into our range
            if self.inbetween(i.NODEID, self.predecessor.NODEID,
                              self.address.NODEID):
                # add file to our list
                self.indexfile.append(i)
                print("Inherited %s from %s\n" %
                      (i.filename, self.successor.hostname))
                # remove the file from our successor's list
                suc.removefile(i)
    def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
        """Initialise the search helper.

        Args:
            useproxy: route traffic through a Proxy handler when truthy.
            retries: number of search retries; None means unlimited.
            verbose: print progress messages.
            sleep: seconds to pause while searching for urls.
        """
        self.urls = []  # contains scraped urls
        self.blacklist = []  # contains blacklisted proxies
        self.useproxy = useproxy  # dictates use of proxy
        self.retries = retries  # sets the number of search retries, if None => unlimited
        self.verbose = verbose  # sets verbosity level
        self.sleep = sleep  # dictates sleep while searching for urls
        # Fixed: dropped the redundant chained local alias
        # (`self.cookie_jar = cookie_jar = ...`); the local was unused.
        self.cookie_jar = http.cookiejar.CookieJar()

        self.proxyhandler = None
        if self.useproxy:
            self.proxyhandler = Proxy(self.verbose)
            self.proxyhandler.proxify()
        if self.verbose:
            bcolors.printGreen("[+]Search object created!")
def main():
    """Entry point: parse ``name=value`` CLI options, then run the proxy."""
    max_connection = None
    port = None
    # Options arrive as e.g. ``port=8080 max_connection=10``.
    for argument in sys.argv[1:]:
        key, raw_value = argument.split('=')
        if key == 'max_connection':
            max_connection = int(raw_value)
        elif key == 'port':
            port = int(raw_value)

    proxy = Proxy(max_connection=max_connection, port=port)
    print('Main:: proxy program starts')
    # Cache files are resolved relative to the launch directory.
    CacheHandler.origin = os.getcwd()
    proxy.listenConnection()
    print('Main:: proxy program ends')
Beispiel #15
0
def add_proxys(list):
    """
    Add proxies to the sqlite database.
    :param list: list of proxy "ip:port" entries
    :return:
    """
    session = conn(PROXYDB)
    for p in list:
        # NOTE(review): ``list`` (parameter) and ``type`` (local) shadow
        # builtins — rename when call sites can be updated.
        type = "socks5"
        try:
            proxy = Proxy(type=type, ip_port=p, failnum=0)
            session.add(proxy)
            session.commit()
        except Exception, e:
            # Python 2 except syntax; failures (e.g. duplicate rows) are
            # printed and skipped so the rest of the list still loads.
            print e
            pass
Beispiel #16
0
def get_proxy_from_clarketm():
    """Download clarketm's raw proxy list and return it as Proxy objects.

    Returns:
        list[Proxy]: deduplicated proxies with blank anon/country/iso
        fields.  Exits the process if the download fails.
    """
    try:
        url = 'https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt'
        txtlist = requests.get(url).text
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    except Exception:
        print('Não foi possível baixar lista de proxies do Clark...')
        exit()
    else:
        proxyset = set()
        lista = txtlist.split()
        for entry in lista:
            # Each entry is "ip:port"; split once instead of twice.
            ip, port = entry.split(':', 1)
            proxyset.add(Proxy(ip, port, '', '', ''))
        return list(proxyset)  # set não tem indice, melhor converter em lista
Beispiel #17
0
def find_proxy():
    """Scrape spys.one (pages 0-4) via the selenium ``driver`` global and
    refill the module-level ``proxies`` buckets, sorted, split into HTTP
    and SOCKS5 by the scraped type string.
    """
    print('start proxy search')
    base_url = 'http://spys.one/proxies/'
    temp_proxies = []
    for i in range(5):
        url = base_url + str(i) + "/"
        driver.get(url)
        html = driver.page_source
        soup = BeautifulSoup(html, features="lxml")
        # The third table on the page holds the proxy rows — layout
        # assumed from these fixed indices; breaks if the site changes.
        table = soup.select("table")[2]
        rows = table.select("tr")
        for row in rows:
            cols = row.select("td")
            if len(cols) > 4:
                # Skip the (Russian) header row "Proxy адрес:порт".
                if cols[0].text != 'Proxy адрес:порт':
                    # The host cell embeds an obfuscating script; strip
                    # everything from 'document' onwards to get the IP.
                    host_string = cols[0].select("font")[1].text
                    split_by_colon = host_string.split(':')
                    host = split_by_colon[0].split('document')[0]
                    port = split_by_colon[2]

                    proxy_type = str(cols[1].select("font")[0].contents[0])

                    latency = cols[3].select("font")[0].text

                    country = str(cols[4].select("font")[0].contents[0])

                    proxy = Proxy(host, port, proxy_type, latency, country)

                    temp_proxies.append(proxy)

    # Replace the previous result set atomically-ish: clear, refill, sort.
    proxies[ProxyType.HTTP].clear()
    proxies[ProxyType.SOCKS5].clear()

    for proxy_item in temp_proxies:
        item_proxy_type = proxy_item.proxy_type.upper()
        if item_proxy_type == ProxyType.HTTP.value:
            proxies[ProxyType.HTTP].append(proxy_item)
        elif item_proxy_type == ProxyType.SOCKS5.value:
            proxies[ProxyType.SOCKS5].append(proxy_item)

    proxies[ProxyType.HTTP].sort()
    proxies[ProxyType.SOCKS5].sort()

    print(str(len(proxies[ProxyType.HTTP])) + ' HTTP proxies found')
    print(str(len(proxies[ProxyType.SOCKS5])) + ' SOCKS proxies found')
 def newProxy(self, log):
     """Return the first unused, fresh proxy from the pool, or None.

     Marks the returned proxy as in-use in ``self.proxyDf``.

     Args:
         log: unused here; kept for interface compatibility.
     """
     proxy = Proxy()
     for indice_fila, fila in self.proxyDf.iterrows():
         if fila['ESTADO'] == Proxy.Estado.no_uso and fila[
                 'SITUACION'] == Proxy.Situacion.nuevo:
             proxy.id = indice_fila
             proxy.cadena = fila['CADENA']
             proxy.estado = fila['ESTADO']
             proxy.situacion = fila['SITUACION']
             # BUG FIX: iterrows() yields copies of each row, so the old
             # `fila['ESTADO'] = ...` never updated the DataFrame.
             # Write through .at so the state change actually persists.
             self.proxyDf.at[indice_fila, 'ESTADO'] = Proxy.Estado.uso
             self.proxyDf.at[indice_fila, 'SITUACION'] = Proxy.Situacion.nuevo
             return proxy
     # No live proxies available.
     return None
Beispiel #19
0
def get_page(url):
    """Fetch *url* through a random proxy, retrying until a request
    succeeds.

    Returns:
        str: the response body decoded as cp1250.
    """
    headers = {
        'user-agent': 'Mozilla/4.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3904.97 Safari/537.36'
    }

    proxy_manager = Proxy()
    proxy_list = proxy_manager.get_proxy()

    session = requests.Session()
    session.headers.update(headers)

    while True:
        try:
            proxy = random.choice(proxy_list)
            session.proxies = proxy
            result = session.get(url)
            # BUG FIX: 'cp-1250' is not a recognised codec alias, so the
            # intended encoding was silently ignored by requests;
            # 'cp1250' is the correct name.
            result.encoding = 'cp1250'
            result = result.text
            return result
        except Exception:
            # NOTE(review): retries forever on persistent failures —
            # consider a retry cap.
            print('IP blocked or other error, ', )
Beispiel #20
0
    def run(self):
        """Continuously poll ProxyFiles/ for proxy list files.

        Each file is expected to contain one ``ip:port`` per line; valid
        entries become Proxy objects and are updated, then the file is
        deleted.  Sleeps 60 seconds between polls; never returns.
        """
        # Compile once outside the loops; raw string avoids the invalid
        # escape-sequence warning of the old '\d' literal.
        entry_pattern = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}$')
        while True:
            fileList = [f for f in listdir('ProxyFiles') if isfile(join('ProxyFiles', f)) and f != '.gitkeep']
            for file in fileList:
                print("New file ready for processing: " + file)

                # FIX: use a context manager — the old bare open() leaked
                # the file handle, which can block the os.remove below on
                # some platforms.
                with open(join('ProxyFiles', file)) as fh:
                    lines = [line.strip() for line in fh]
                for line in lines:
                    if entry_pattern.match(line) is not None:
                        proxyParts = line.split(":")
                        px = Proxy(proxyParts[0], int(proxyParts[1]))

                        px.updateProxy()
                    else:
                        print("Odd line: " + line, flush=True)

                print("File processed and deleted: " + file)

                os.remove(join('ProxyFiles', file))
            time.sleep(60)
Beispiel #21
0
    def _runProxyMode(self, operations, args):
        """Handle the rest of the arguments and run in proxy mode.

        *args* must be exactly (proxiedHost, proxiedPort, listeningPort);
        ports are validated via _castToValidPort.  Enters the asyncore
        event loop and exits the process with status 1 on socket errors.
        Python 2 code (old except syntax; asyncore).

        This method does not return.

        """
        from Proxy import Proxy
        import asyncore
        import socket
        if not len(args) == 3:
            usage("proxy mode requires exactly 3 arguments", 1)
        proxiedHost, proxiedPort, listeningPort = args
        try:
            # The Proxy constructor registers itself with asyncore as a
            # side effect; the instance is intentionally not kept.
            Proxy(proxiedHost, 
                self._castToValidPort(proxiedPort), 
                self._castToValidPort(listeningPort),  
                self._composeOperations(operations),
                self._composeOperations(operations, True))
            asyncore.loop()
        except socket.error, e:
            stderr.write("%s: socket error: %s\n" % (Const.NAME, e.args[1]))
            exit(1)
Beispiel #22
0
from unittest import TestCase
import requests

from Proxy import Proxy

SIMPLE_TEXT_FILE = 'http://www.ida.liu.se/~TDTS04/labs/2011/ass2/goodtest1.txt'
SIMPLE_HTML_FILE = 'http://www.ida.liu.se/~TDTS04/labs/2011/ass2/goodtest2.html'
BAD_URL_FILE = 'http://www.ida.liu.se/~TDTS04/labs/2011/ass2/SpongeBob.html'
BAD_CONTENT_FILE = 'http://www.ida.liu.se/~TDTS04/labs/2011/ass2/badtest1.html'

SIMPLE_TEXT_CONTENT = b'\nThis is a plain text file with no bad words in it.\n\nYour Web browser should be able to display this page just fine.\n\n\n'
SIMPLE_HTML_CONTENT = b'<html>\n\n<title>\nGood HTML File Test for CPSC 441 Assignment 1\n</title>\n\n<body>\n<p>\nThis is a simple HTML file with no bad words in it.\n</p>\n\n<p>\nYour Web browser should be able to display this page just fine.\n</p>\n\n</body>\n\n</html>\n\n\n'


proxy = Proxy(1337)


class TestProxy(TestCase):
    @classmethod
    def setUpClass(cls):
        proxy.start()

    @classmethod
    def tearDownClass(cls):
        proxy.stop()

    def test_simple_text_file(self):
        response = requests.get(SIMPLE_TEXT_FILE, proxies=proxy.get_proxies())
        self.assertEqual(SIMPLE_TEXT_CONTENT, response.content)

    def test_simple_html_file(self):
Beispiel #23
0
    def __init__ (self, url, props=None):
        """Build the dialog and attach a Proxy for *url*.

        Args:
            url: target URL, proxied through the current HTTP host.
            props: optional property dict; it is copied, never mutated.
        """
        # FIX: replaced the mutable default argument ({}) with None —
        # semantics unchanged since the dict was only ever copied.
        Dialog.__init__ (self, dict(props) if props else {})

        scgi = get_scgi()
        self += Proxy (scgi.env['HTTP_HOST'], url)
Beispiel #24
0
#!/usr/bin/python
#-*-  coding:utf-8  -*-

from Proxy import Proxy

if __name__ == '__main__':
    # Proxy-pattern demo: the three arguments are presumably sender,
    # recipient and go-between names — TODO confirm against Proxy.__init__.
    p = Proxy("李雷", "李白", "韩梅梅")
    print(p)
    # The proxy forwards each gift delivery on the sender's behalf.
    p.send_book()
    p.send_chocolate()
    p.send_flower()
Beispiel #25
0
import re
import time

from Proxy import Proxy

# Start the proxy so traffic can be intercepted.
p = Proxy()
p.startProxy()

# Outputs all jpg urls that it encounters.
def hook(message):
     # Match quoted .jpg/.jpeg URLs inside the intercepted message body.
     # Python 2 code (print statement below).
     urls = re.findall('["\'][^"\']+\.jpe?g["\']', message)
     if urls:
            print urls
    
p.setHook(hook)

# Wait around forever to see the results
while True:
    time.sleep(1000)
Beispiel #26
0
 def revnotify(self, addr):
     """Adopt *addr* as our new successor and propagate it backwards to
     our predecessor over RPC.
     """
     self.successor = addr
     self.toString()
     predecessor_proxy = Proxy(self.predecessor.ip_addr, self.predecessor.port)
     predecessor_proxy.revnotify2(addr)
Beispiel #27
0
from Proxy import Proxy

if __name__ == '__main__':
    proxy = Proxy()

    # Read three coefficients per round and solve through the proxy
    # (presumably a quadratic solver, given "Brak rozwiazan" = "no
    # solutions" — TODO confirm) until the user enters '1' to quit.
    while True:
        print('podaj a: ', end='')
        a = input()
        print('podaj b: ', end='')
        b = input()
        print('podaj c: ', end='')
        c = input()

        wynik = proxy.licz(int(a), int(b), int(c))
        # FIX: identity comparison with None (was `wynik != None`).
        if wynik is not None:
            print(f'\n{wynik=}')
        else:
            print('\nBrak rozwiazan')

        x = input('Aby wyjsc wybierz 1\nBy kontynuowac wpisz dane != 1\n')
        if x == '1':
            break
Beispiel #28
0
def main():
    """Load config.json and start the proxy's accept loop."""
    # FIX: use a context manager — the old bare open() leaked the
    # config file handle.
    with open("config.json", "r") as configFile:
        config = JsonParser.JsonParser(configFile.read())
    proxy = Proxy(config.__dict__)
    proxy.acceptClients()
Beispiel #29
0
        print "*\tGATEWAY IP {} ({})\t*".format(self.args.gateway_ip,
                                                self.args.gateway_mac)
        print "*********************************************************\033[0m"

    def setup(self, conf):
        # Placeholder implementation — *conf* is currently ignored.
        print("lol")


if __name__ == "__main__":
    if os.geteuid() != 0:
        sys.exit("Need root privileges to run properly; Re-run as sudo...")
    swrt = SWRT()
    if (swrt.args.conf != None):
        config = DNSConf.DNSConf(swrt.args.interface, swrt.args.conf)

    proxy = Proxy(swrt.args, config)
    dnsproxy = DnsProxy(config, swrt.args)
    try:
        ArpPoisoner(swrt.args)
        dnsproxy.go()
        proxy.go()
    except KeyboardInterrupt:
        proxy.stop()
        exit(1)
    except Exception as e:
        proxy.stop()
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)
        traceback.print_exception(exc_type,
                                  exc_value,
                                  exc_traceback,
Beispiel #30
0
import time
import random
import json
import time
import threading
from queue import Queue
from urllib.parse import quote
from Proxy import Proxy

headers = {
    'User-Agent':
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.162 Safari/537.36'
}

proxy = Proxy(
    "http://tpv.daxiangdaili.com/ip/?tid=557133875098914&num=1&delay=5&filter=on"
)
proxies = None


def get_proxy():
    """Return a proxy from the module-level Proxy pool (whatever
    ``proxy.get_proxy()`` yields — format depends on Proxy, TODO confirm).
    """
    # FIX: removed the needless `global proxy` — the declaration is only
    # required for assignment, not for reading a module-level name.
    return proxy.get_proxy()


def pause():
    '''
	延迟一到两秒
	'''
    sleepTime = random.randint(100, 200) * 1.0 / 1000
    # 延迟0.1-0.2秒