def run_tests(links, testLevel=1, v=False):
    if links == [] or links is None:
        bcolors.printFail("[-]Invalid input parameters! Exiting...")
        return
    proxyhandler = Proxy(True)
    proxyhandler.proxify()
    proxyhandler.validate_proxy()
    TestObjects = []
    for link in links:
        sqlI = True  # Used to check if we need to perform a second, time-based SQL injection test
        res = sql_error_scan(link, v)
        if res is not None:
            TestObjects.append(TestObject.TestObject(link, "SQLi", res))
            sqlI = False
        # Time-based SQLi
        if testLevel > 1 and sqlI:
            res = sql_time_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "SQLiT", res))
        # XSS
        if testLevel > 2:
            res = xss_vuln_scan(link, v)
            if res is not None:
                TestObjects.append(TestObject.TestObject(link, "XSS", res))
    if v:
        if TestObjects != []:
            bcolors.printGreen("~*~*~*~*~PRINTING FOUND TARGETS~*~*~*~*~")
            for t in TestObjects:
                t.print_test()
        else:
            bcolors.printFail("~*~*~*~*~NO TARGETS FOUND~*~*~*~*~")
    return TestObjects
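# A minimal usage sketch for run_tests() above, assuming the surrounding module
# already provides Proxy, bcolors, TestObject and the three scan functions.
# The URLs are placeholders, not real targets.
if __name__ == "__main__":
    candidate_links = [
        "http://example.com/products.php?id=1",
        "http://example.com/search.php?q=test",
    ]
    # testLevel=3 enables error-based SQLi, time-based SQLi and XSS checks;
    # v=True prints any found targets via TestObject.print_test().
    findings = run_tests(candidate_links, testLevel=3, v=True)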
def __init__(self, session): """<method internal="yes"> <summary> Constructor initializing a PlugProxy instance. </summary> <description> <para> This constructor creates and sets up a PlugProxy instance. </para> </description> <metainfo> <arguments> <argument maturity="stable"> <name>session</name> <type>SESSION</type> <description> session this instance belongs to </description> </argument> </arguments> </metainfo> </method> """ self.stack_proxy = None Proxy.__init__(self, session)
def __init__(self, session): """<method maturity="stable"> <summary> Constructor to initialize an AnyPy instance. </summary> <description> <para> This constructor initializes a new AnyPy instance based on arguments and calls the inherited constructor. </para> </description> <metainfo> <arguments> <argument maturity="stable"> <name>session</name> <type>SESSION</type> <description> session we belong to </description> </argument> </arguments> </metainfo> </method> """ Proxy.__init__(self, session)
def __init__(self, session): Proxy.__init__(self, session) log(self.session.session_id, CORE_SESSION, 5, "Proxy start") self._detector_config = OrderedDict(session.service.detector_config) self._detector_default_service_name = session.service.detector_default_service_name self.results = {}
def __init__(self, proxy: Proxy.Proxy):
    self._proxy = proxy
    # self.objectId = 0
    proxy.hookPacket(Packets.HelloPacket, self.onHello)
    proxy.hookPacket(Packets.CreateSuccessPacket, self.onCreateSuccess)
    proxy.hookPacket(Packets.ReconnectPacket, self.onReconnect)
    proxy.hookPacket(Packets.FailurePacket, self.onFailure)
    proxy.hookCommand("reload", self.reloadPlugins)
def __init__(self, proxy: Proxy.Proxy):
    self._proxy = proxy
    self.spam_to_filter = [
        'realmbags', 'rpgstash', 'rotmgmax', 'realmstock',
        'eye of oryx', "oryxin", "realm power.net", "rwtmg.com",
        "realmpower", "rqru", "rotmgstore"
    ]
    proxy.hookPacket(Packets.TextPacket, self.onText)
def notify(self, addr):
    self.predecessor = addr
    # Only two nodes in the ring
    if not self.successor:
        self.successor = addr
        suc = Proxy(addr.ip_addr, addr.port)
        suc.notify(self.address)
        self.sucsuccessor = suc.getsucc()
    self.toString()
def find_success(self, id):
    # if we are the only node in the ring
    if self.is_root == 1 and not self.successor:
        return self.address
    else:
        # if we are the successor
        if self.inbetween(id, self.address.NODEID, self.successor.NODEID):
            return self.successor
        # else ask our successor
        else:
            suc = Proxy(self.successor.ip_addr, self.successor.port)
            return suc.find_successor(id)
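# find_success() relies on an inbetween() helper that is not shown in this
# snippet. The sketch below is one common Chord-style definition (an
# assumption, not the original code): it checks whether an id falls in the
# half-open interval (begin, end] on an identifier ring that wraps around.
def inbetween(self, id, begin, end):
    if begin < end:
        return begin < id <= end
    # Interval wraps around the end of the identifier space
    return id > begin or id <= end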
def __init__(self, province_name, log=None):
    """
    Initialize the proxy, the requests Session and the User-Agent.
    :return:
    """
    self.ua = random.choice(user_agent)
    self.ss = Session()
    self.pro_name = province_name.lower()
    self.proxy_c = Proxy(self.pro_name)
    self.proxyInit()
    self.correct_http = 0
    self.error_http = 0
    self.log = log
    self.proxy = self.proxySet
class EmptyAI:
    def __init__(self, ip, port):
        self.game = Proxy(ip, port, self)

    def think(self):
        # You're free to do everything you want here
        while True:
            self.game.update_sim_frame()
            for plane in self.game.get_my_planes():
                self.game.send_command(WaitCommand(plane))

    def end(self):
        pass
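# A hypothetical way to start the AI above; the host, port and the blocking
# behaviour of think() are assumptions based on the constructor signature.
if __name__ == "__main__":
    ai = EmptyAI("127.0.0.1", 5555)
    try:
        ai.think()  # loops forever, issuing a WaitCommand for each plane
    finally:
        ai.end()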
def inherit(self):
    suc = Proxy(self.successor.ip_addr, self.successor.port)
    s_list = suc.getindexfile()
    # for every item in our successor
    for i in s_list:
        # if it falls into our range
        if self.inbetween(i.NODEID, self.predecessor.NODEID, self.address.NODEID):
            # add the file to our list
            self.indexfile.append(i)
            print("Inherited %s from %s\n" % (i.filename, self.successor.hostname))
            # remove the file from our successor's list
            suc.removefile(i)
def main():
    max_connection = None
    port = None
    for option in sys.argv[1:]:
        optionName, val = option.split('=')
        if optionName == 'max_connection':
            max_connection = int(val)
        elif optionName == 'port':
            port = int(val)
    proxy = Proxy(max_connection=max_connection, port=port)
    print('Main:: proxy program starts')
    CacheHandler.origin = os.getcwd()
    proxy.listenConnection()
    print('Main:: proxy program ends')
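# Example invocation for the entry point above (the script name is an
# assumption, not taken from the original source):
#
#   python proxy_main.py port=8080 max_connection=10
#
# Both options are optional; omitted values stay None and are presumably
# given defaults inside Proxy itself.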
def get_proxy_from_free_proxy():
    url = 'https://free-proxy-list.net/'
    header = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
    }
    page = requests.get(url, headers=header)
    soup = BeautifulSoup(page.text, 'html.parser')
    table = soup.find('table')
    tbody = table.find('tbody')
    rows = tbody.find_all('tr')
    proxyset = set()
    for row in rows:
        cols = row.find_all('td')
        ip = cols[0].text
        port = cols[1].text
        code = cols[2].text
        country = cols[3].text
        anon = cols[4].text
        https = cols[6].text
        time = cols[7].text
        if https == 'no':
            proxy = Proxy(ip, port, anon, country, '')  # ip, port, anon, country, iso
            proxyset.add(proxy)
    return list(proxyset)
def get_proxy_list():
    try:
        url = 'https://www.proxy-list.download/api/v0/get?l=en&t=http'
        request = requests.get(url)
        json_lst = request.json()
    except:
        print('Could not download the proxy list...')
        exit()
    # pprint(json)
    DICT = json_lst[0]
    UPDATED = DICT.get('UPDATED')
    UPDATEDAV = DICT.get('UPDATEDAV')
    TOTAL = DICT.get('TOTAL')
    PAISES = DICT.get('PAISES')
    LISTA = DICT.get('LISTA')
    # Load the proxy list and return it as a list of Proxy objects
    proxyset = set()
    for server in LISTA:
        proxy = Proxy(server.get('IP'), server.get('PORT'), server.get('ANON'),
                      server.get('COUNTRY'), server.get('ISO'))
        # print('added =', proxy)
        proxyset.add(proxy)
    return list(proxyset)
def test_proxy(proxy):
    test_times = 5
    v = db_public.proxy2_select2(proxy)
    if not v:
        return True
    proxy = v[0]
    count = v[1]
    lost = v[2]
    total = v[3]
    url = 'http://www.baidu.com'
    # Check that we have network connectivity at all
    if Proxy.Http.http_get('http://www.baidu.com', BaiduHeader) is None:
        return False
    # Test the proxy
    nlost = 0
    bt = time.time()
    for x in range(0, test_times):
        if Proxy.test_get(proxy, url, BaiduHeader) is None:
            nlost += 1
    et = time.time()
    total += et - bt
    count += test_times
    lost += nlost
    db_public.proxy2_insert_or_update(proxy, count, lost, total)
    print('proxy={}, loss rate={}, avg={}, count={} lost={} '.format(
        proxy, nlost * 1.0 / test_times, (et - bt) / test_times, test_times, nlost))
    # print('proxy={}, loss rate={}, avg={}, count={} lost={} '.format(proxy, lost*1.0/count, total/count, count, lost))
    return True
def _check_proxy(self, ip, port, result):
    proxy = Proxy(ip, port)
    self._evaluate_responsiveness(proxy)
    proxy.transparency = -1
    if proxy.responsiveness < self._max_responsiveness:
        self._evaluate_transparency(proxy)
    proxy.last_checked = str(datetime.now())
    result.append(proxy)
    with self._mutex:
        self._total -= 1
        if self._total == 0:
            self._running_semaphore.release()
    self._active_threads.release()
    return
def __init__(self, session): """<method maturity="stable" internal="yes"> <summary> Constructor to initialize a TelnetProxy instance. </summary> <description> <para> This function initializes a TelnetProxy instance by calling the inherited __init__ constructor with appropriate parameters. </para> </description> <metainfo> <arguments/> </metainfo> </method> """ Proxy.__init__(self, session)
def find_new_proxy(tn=0):
    ips = []
    if tn == 0:
        ips = Proxy.get_ip_list_ip66()
    if tn == 1:
        ips = Proxy.get_ip_list_xc(0, 1)
    if tn == 2:
        ips = Proxy.get_ip_list_xc(1, 1)
    if tn == 3:
        ips = Proxy.get_ip_list_xc(2, 1)
    if tn == 4:
        ips = Proxy.get_ip_list_xc(3, 1)
    if tn == 5:
        ips = Proxy.get_ip_list_89ip(100)
    if not ips:
        return True
    url = 'http://www.baidu.com'
    # Check that we have network connectivity at all
    if Proxy.Http.http_get('http://www.baidu.com', BaiduHeader) is None:
        return True
    test_times = 5
    for proxy in ips:
        info = db_public.proxy2_select2(proxy)
        if info:
            continue
        lost = 0
        count = 0
        total = 0.0
        if Proxy.test_get(proxy, url, BaiduHeader) is None:
            continue
        # Test the proxy
        bt = time.time()
        for x in range(0, test_times):
            if Proxy.test_get(proxy, url, BaiduHeader) is None:
                lost += 1
        et = time.time()
        total += et - bt
        count += test_times
        print('proxy={}, loss rate={}, avg={}, count={} lost={} '.format(
            proxy, lost * 1.0 / test_times, (et - bt) / test_times, test_times, lost))
        # Skip proxies that dropped requests
        if lost >= 1:
            continue
        # Skip proxies that are too slow
        if total > 3:
            continue
        db_public.proxy2_insert_or_update(proxy, count, lost, total)
    return True
def get_page(url):
    headers = {
        'user-agent': 'Mozilla/4.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3904.97 Safari/537.36'
    }
    proxy_manager = Proxy()
    proxy_list = proxy_manager.get_proxy()
    session = requests.Session()
    session.headers.update(headers)
    while True:
        try:
            proxy = proxy_list[random.randint(0, len(proxy_list) - 1)]
            session.proxies = proxy
            result = session.get(url)
            result.encoding = 'cp1250'  # Windows-1250; 'cp-1250' is not a recognized codec name
            result = result.text
            return result
        except Exception:
            print('IP blocked or other error')
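# Hedged usage sketch for get_page(); the URL is a placeholder, and the shape
# of the entries returned by Proxy.get_proxy() is assumed to be whatever
# requests accepts for session.proxies (e.g. {"http": "...", "https": "..."}).
if __name__ == "__main__":
    html_text = get_page("http://example.com/listing?page=1")
    print(len(html_text), "characters downloaded")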
def deep_iter_proxy(arg):
    if isinstance(arg, Proxy):
        proxys.add(arg)
    elif isinstance(arg, basestring):
        try:
            proxy = Proxy(arg)
            proxys.add(proxy)
        except ProxyException:
            pass
    elif isinstance(arg, collections.Iterable):
        for item in arg:
            deep_iter_proxy(item)
def get_list_enable_proxy():
    res_list = []
    data = subprocess.check_output(
        ['iptables', '-t', 'nat', '-L', '--line-numbers', '-n'])
    data = data.decode('utf-8')
    lines_data = data.split('\n')
    for line in lines_data:
        if 'dpt' in line:
            line = search_ip_port(line)
            params = line.split(':')
            res_list.append(Proxy(params[1], params[2], int(params[0][:-3])))
    return res_list
def run(self):
    while True:
        fileList = [f for f in listdir('ProxyFiles')
                    if isfile(join('ProxyFiles', f)) and f != '.gitkeep']
        for file in fileList:
            print("New file ready for processing: " + file)
            lines = [line.strip() for line in open(join('ProxyFiles', file))]
            for line in lines:
                if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}$', line) is not None:
                    proxyParts = line.split(":")
                    px = Proxy(proxyParts[0], int(proxyParts[1]))
                    px.updateProxy()
                    # print(line, flush=True)
                else:
                    print("Odd line: " + line, flush=True)
            print("File processed and deleted: " + file)
            os.remove(join('ProxyFiles', file))
        time.sleep(60)
def rec_ret(self, file, filename):
    print("Looking for file %s" % filename)
    # if we have the file
    if self.checkfile(file):
        print("Found!\n")
        return self.address
    # if we don't have the file
    else:
        print("I don't have file %s" % filename)
        # if we have a successor, forward the search to our successor
        if self.successor:
            if self.is_root == 1:
                print("Forwarding the search to %s...\n" % self.successor.hostname)
                suc = Proxy(self.successor.ip_addr, self.successor.port)
                return suc.recursive(file, filename)
            else:
                # if the search makes one full circle and comes back to the root, the file does not exist
                if self.successor.ip_addr == self.root_addr.ip_addr and self.successor.port == self.root_addr.port:
                    print("There is no %s in the ring...\n" % filename)
                    return None
                else:
                    print("Forwarding the search to %s...\n" % self.successor.hostname)
                    suc = Proxy(self.successor.ip_addr, self.successor.port)
                    return suc.recursive(file, filename)
        # if we don't have a successor, the file is not in the ring
        else:
            print("There is no %s in the ring...\n" % filename)
            return None
def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
    self.urls = []              # contains scraped urls
    self.blacklist = []         # contains blacklisted proxies
    self.useproxy = useproxy    # dictates use of proxy
    self.retries = retries      # sets the number of search retries, if None => unlimited
    self.verbose = verbose      # sets verbosity level
    self.sleep = sleep          # dictates sleep while searching for urls
    self.proxyhandler = None
    if self.useproxy:
        self.proxyhandler = Proxy(self.verbose)
        self.proxyhandler.proxify()
    if self.verbose:
        bcolors.printGreen("[+]Search object created!")
def __init__(self, session): """<method maturity="stable" internal="yes"> <summary> Initialize a Pop3Proxy instance. </summary> <description> <para> Create and set up a Pop3Proxy instance. </para> </description> <metainfo> <arguments> <argument> <name>session</name> <type>SESSION</type> <description> session this instance belongs to </description> </argument> </arguments> </metainfo> </method> """ Proxy.__init__(self, session)
def __init__(self, session): """<method maturity="stable" internal="yes"> <summary> Constructor to initialize a WhoisProxy instance. </summary> <description> <para> This constructor creates and set up a WhoisProxy instance. </para> </description> <metainfo> <arguments> <argument maturity="stable"> <name>session</name> <type>SESSION</type> <description> session this instance belongs to </description> </argument> </arguments> </metainfo> </method> """ Proxy.__init__(self, session)
def __init__(self, session): """<method internal="yes"> <summary> Constructor to initialize a FingerProxy instance. </summary> <description> <para> This constructor creates and set up a FingerProxy instance. </para> </description> <metainfo> <arguments> <argument internal="yes"> <name>session</name> <type>SESSION</type> <description> session this instance belongs to </description> </argument> </arguments> </metainfo> </method> """ Proxy.__init__(self, session)
def get_proxy_from_clarketm():
    try:
        url = 'https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt'
        txtlist = requests.get(url).text
    except:
        print('Could not download the proxy list from Clark...')
        exit()
    else:
        proxyset = set()
        lista = txtlist.split()
        for proxy in lista:
            ip = proxy.split(':')[0]
            port = proxy.split(':')[1]
            proxy = Proxy(ip, port, '', '', '')
            proxyset.add(proxy)
        return list(proxyset)  # a set has no indexing, so convert it to a list
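# A small sketch (not part of the original code) that merges the three
# fetchers defined above into one de-duplicated list. It assumes Proxy
# instances hash/compare by address, which the existing use of set() implies.
def get_all_proxies():
    merged = set()
    merged.update(get_proxy_list())
    merged.update(get_proxy_from_free_proxy())
    merged.update(get_proxy_from_clarketm())
    return list(merged)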
def add_proxys(list):
    """
    Add proxies to the SQLite database.
    :param list: list of proxies
    :return:
    """
    session = conn(PROXYDB)
    for p in list:
        type = "socks5"
        try:
            proxy = Proxy(type=type, ip_port=p, failnum=0)
            session.add(proxy)
            session.commit()
        except Exception as e:
            print(e)
            pass
def periodical(self):
    while 1:
        # check every 3 seconds
        time.sleep(3)
        if self.successor:
            if not self.ping(self.successor):
                # fix the broken segment by reassigning pointers
                print("%s failed: Stabilizing...\n" % self.successor.hostname)
                if self.successor.NODEID == self.predecessor.NODEID:
                    self.reset()
                    self.toString()
                else:
                    self.successor = self.sucsuccessor
                    sucsuc = Proxy(self.sucsuccessor.ip_addr, self.sucsuccessor.port)
                    self.sucsuccessor = sucsuc.getsucc()
                    sucsuc.notify(self.address)
                    pred = Proxy(self.predecessor.ip_addr, self.predecessor.port)
                    pred.revnotify2(self.successor)
                    self.toString()
def find_proxy():
    print('start proxy search')
    base_url = 'http://spys.one/proxies/'
    temp_proxies = []
    for i in range(5):
        url = base_url + str(i) + "/"
        driver.get(url)
        html = driver.page_source
        soup = BeautifulSoup(html, features="lxml")
        table = soup.select("table")[2]
        rows = table.select("tr")
        for row in rows:
            cols = row.select("td")
            if len(cols) > 4:
                if cols[0].text != 'Proxy адрес:порт':
                    host_string = cols[0].select("font")[1].text
                    split_by_colon = host_string.split(':')
                    host = split_by_colon[0].split('document')[0]
                    port = split_by_colon[2]
                    proxy_type = str(cols[1].select("font")[0].contents[0])
                    latency = cols[3].select("font")[0].text
                    country = str(cols[4].select("font")[0].contents[0])
                    proxy = Proxy(host, port, proxy_type, latency, country)
                    temp_proxies.append(proxy)
    proxies[ProxyType.HTTP].clear()
    proxies[ProxyType.SOCKS5].clear()
    for proxy_item in temp_proxies:
        item_proxy_type = proxy_item.proxy_type.upper()
        if item_proxy_type == ProxyType.HTTP.value:
            proxies[ProxyType.HTTP].append(proxy_item)
        elif item_proxy_type == ProxyType.SOCKS5.value:
            proxies[ProxyType.SOCKS5].append(proxy_item)
    proxies[ProxyType.HTTP].sort()
    proxies[ProxyType.SOCKS5].sort()
    print(str(len(proxies[ProxyType.HTTP])) + ' HTTP proxies found')
    print(str(len(proxies[ProxyType.SOCKS5])) + ' SOCKS proxies found')
def run(self):
    # Create threads that will be constantly filled
    print('ProxyUpdater started')
    proxyList = []
    threadsList = []
    for i in range(self.numberOfThreads):
        t = threading.Thread(target=ProxyUpdater._worker, args=(i, proxyList,))
        threadsList.append(t)
        t.start()
        print('Thread ' + str(i) + ' started')
    while True:
        # Append more items to the list when needed
        if len(proxyList) < 100:
            print('100 more items added')
            proxyList.extend(Proxy.getProxyBag(100))
def test_proxy_post(proxy=''):
    proxys = db_public.proxy2_select()
    for v in proxys:
        proxy = v[0]
        count = v[1]
        lost = v[2]
        total = v[3]
        url = 'http://www.baidu.com'
        # Check that we have network connectivity at all
        if Proxy.Http.http_get('http://www.baidu.com', BaiduHeader) is None:
            return 0
        # Test the proxy
        bt = time.time()
        for x in range(0, 5):
            if Proxy.test_post(proxy, url, {}, BaiduHeader) is None:
                lost += 1
        et = time.time()
        total += et - bt
        count += 5
        db_public.proxy2_insert_or_update(proxy, count, lost, total)
    return
def __init__(self, proxy_list):
    Proxy.__init__(self, None)
    self._current_proxy = None
    self._proxy_list = proxy_list
    self._proxy_counter = 0
    self._last_proxy_index = len(proxy_list)
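# The constructor above only stores the list and a counter; a rotation helper
# such as the one below is a hypothetical addition (not part of the original
# class) showing how _proxy_counter and _last_proxy_index might be used
# together to cycle through the configured proxies.
def _next_proxy(self):
    if self._last_proxy_index == 0:
        return None  # empty proxy list
    self._current_proxy = self._proxy_list[self._proxy_counter]
    self._proxy_counter = (self._proxy_counter + 1) % self._last_proxy_index
    return self._current_proxy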
from Proxy import Proxy

if __name__ == '__main__':
    p = Proxy()
    p.work()
import re
import time

from Proxy import Proxy

p = Proxy()
p.startProxy()


# Outputs all jpg urls that it encounters.
def hook(message):
    urls = re.findall(r'["\'][^"\']+\.jpe?g["\']', message)
    if urls:
        print(urls)


p.setHook(hook)

# Wait around forever to see the results
while True:
    time.sleep(1000)
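# Quick illustration of what the hook matches (the sample message is made up):
#
#   re.findall(r'["\'][^"\']+\.jpe?g["\']',
#              'GET "/static/img/cat.jpg" and \'banner.jpeg\' requested')
#
# returns ['"/static/img/cat.jpg"', "'banner.jpeg'"] - quotes included, since
# the pattern also captures the surrounding quote characters.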
def __init__(self, session): Proxy.__init__(self, session) log(self.session.session_id, CORE_SESSION, 5, "Proxy start") self._detector_config = session.service.detector_config self.results = {}
class Search:
    # Initializes variables
    def __init__(self, useproxy, retries=None, verbose=False, sleep=5):
        self.urls = []              # contains scraped urls
        self.blacklist = []         # contains blacklisted proxies
        self.useproxy = useproxy    # dictates use of proxy
        self.retries = retries      # sets the number of search retries, if None => unlimited
        self.verbose = verbose      # sets verbosity level
        self.sleep = sleep          # dictates sleep while searching for urls
        self.cookie_jar = http.cookiejar.CookieJar()
        self.proxyhandler = None
        if self.useproxy:
            self.proxyhandler = Proxy(self.verbose)
            self.proxyhandler.proxify()
        if self.verbose:
            bcolors.printGreen("[+]Search object created!")

    def print_state(self):
        bcolors.printBold("****Printing object state****")
        bcolors.printBold("URLs:\n")
        print(str(self.urls))
        bcolors.printBold("Blacklist:\n")
        print(str(self.blacklist))
        bcolors.printBold("Settings:\n")
        print("Retries: " + str(self.retries) + ", verbose: " + str(self.verbose) +
              ", sleep: " + str(self.sleep))

    def print_urls(self):
        bcolors.printBold("****PRINTING URLS****\n")
        for url in self.urls:
            print(str(url))

    # Returns the HTML page of a website.
    # It incorporates error checking and retries.
    # If an unknown error was raised, we call the fatal_exception() method.
    def get_html(self, url):
        if self.useproxy:
            self.proxyhandler.validate_proxy()
        req = urllib.request.Request(url, None, data.headers)
        tries = 0
        while self.retries is None or tries < self.retries:
            try:
                res = urllib.request.urlopen(req)
                self.cookie_jar.extract_cookies(res, req)
                src = res.read()
                break
            except urllib.error.HTTPError as e:
                if e.code != 503:
                    bcolors.printFail("[-]HTTP Error " + str(e) + " was raised!")
                    return None
                if self.useproxy:
                    if self.verbose:
                        bcolors.printWarning("[*]503 Error raised when acquiring search results! Updating proxy...")
                    self.update_proxy()
            # If we have to retry, append the current proxy to the blacklist
            if self.useproxy:
                # blacklists both proxies if an error occurred!
                self.proxyhandler.blacklist_current_proxy(True)
            tries += 1
        return html.fromstring(str(src))

    def update_proxy(self, https=False):
        self.proxyhandler.proxify(https, True)
        self.proxyhandler.validate_proxy()

    def fatal_exception(self, e=None, function_name=None):
        bcolors.printFail("A fatal exception has occurred!")
        if e is not None:
            print(str(e))
        if function_name is not None:
            print(str(function_name))
        bcolors.printBold("****PROGRAM STATE****")
        self.print_state()
        sys.exit(0)
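# Hedged usage sketch for the Search class above. It assumes the surrounding
# module provides Proxy, bcolors, and the data.headers dictionary used by
# get_html(); the URL is a placeholder.
if __name__ == "__main__":
    searcher = Search(useproxy=True, retries=3, verbose=True, sleep=5)
    page_tree = searcher.get_html("http://example.com/?q=test")
    if page_tree is not None:
        searcher.print_state()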