def getPoiPhottoList(self, mddid, poiid, page,
                     default_url="http://www.mafengwo.cn/mdd/ajax_photolist.php"):
    """Fetch one page of a POI's photo list from the mafengwo AJAX endpoint.

    NOTE(review): the method name contains a typo ("Photto"); kept as-is to
    preserve the public interface.

    :param mddid: destination id (stringified; trailing newline stripped)
    :param poiid: POI id
    :param page: page number to fetch
    :param default_url: AJAX endpoint URL
    :return: response body text on success, or None after 5 failed attempts
    """
    try_times = 5
    while try_times > 0:
        try:
            # Draw a fresh proxy on every attempt so a bad proxy is not
            # reused after a failure.
            proxy_new = proxy.Proxy()
            _proxie = proxy_new.getProxyByTxt()
            data = {
                'act': 'getPoiPhotoList',
                'mddid': str(mddid).strip('\n'),
                'poiid': str(poiid).strip('\n'),
                'page': str(page).strip('\n')
            }
            context = requests.get(default_url, proxies=_proxie, params=data,
                                   timeout=5, headers=self.headers)
            context.encoding = 'utf-8'
            return context.text
        except Exception, e:  # Python 2 syntax; broad catch covers proxy/network errors
            print "出现问题 line", LINE.f_lineno, "错误类型", e
            print '倒计时次数:', try_times
            try_times -= 1
            pass
def secure_proxy_url(tls_certificate_pem_path):
    """Yield the URL of a live secure proxy instance.

    The proxy process is spawned for the duration of the test and torn
    down when the generator is closed.
    """
    args = [
        "--threadless",                           # use asyncio
        "--num-workers", "1",                     # one worker is enough for a single query
        "--hostname", "127.0.0.1",                # bind the loopback interface only
        "--port", 0,                              # let the kernel pick a free port
        "--cert-file", tls_certificate_pem_path,  # PEM bundle with key + cert
        "--key-file", tls_certificate_pem_path,   # same bundle doubles as the key
    ]
    with proxy.Proxy(input_args=args) as running:
        url = URL.build(
            scheme="https",
            host=str(running.flags.hostname),
            port=running.flags.port,
        )
        yield url
def get_image_post(self, iMddid, iPage, iTagId,
                   default_url="http://www.mafengwo.cn/ajax/router.php"):
    """Return the page of POIs for a destination/tag via the AJAX router.

    :param iMddid: destination id
    :param iPage: page number
    :param iTagId: tag id
    :param default_url: AJAX router endpoint
    :return: response body text on success, or None after 5 failed attempts
    """
    try_times = 5
    while try_times > 0:
        try:
            # Fresh proxy per attempt so a failing proxy is not reused.
            proxy_new = proxy.Proxy()
            _proxie = proxy_new.getProxyByTxt()
            data = {
                'sAct': 'KMdd_StructWebAjax|GetPoisByTag',
                'iMddid': str(iMddid).strip('\n'),
                'iTagId': str(iTagId).strip('\n'),
                'iPage': str(iPage).strip('\n')
            }
            context = requests.post(default_url, proxies=_proxie, timeout=5,
                                    data=data, headers=self.headers)
            return context.text
        except Exception, e:  # Python 2 syntax; broad catch covers proxy/network errors
            print "出现问题 line", LINE.f_lineno, "错误类型", e
            print '倒计时次数:', try_times
            try_times -= 1
            pass
def compat_proxy_newid(self):
    """Request a new TOR identity and verify the exit IP actually changed.

    Queries an external IP-echo service before and after asking TOR for a
    new identity, then compares the two addresses.
    """
    Proxy = proxy.Proxy()
    # NOTE(review): attribute access without a call -- presumably a
    # property with side effects; confirm against the proxy module.
    Proxy.SetDefaultProxy
    r = compat_get('http://my-ip.herokuapp.com/')
    resp = r.text
    sp = resp.replace('\n','')
    # Crude JSON scraping: take the value after the last ':' and strip
    # the surrounding '}', quotes and spaces.
    default_ip = (((sp.split(':')[-1]).replace('}','')).replace('"','')).replace(' ','')
    print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: configuring tor proxy..."
    compat_sleep(1)
    print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: requesting new identity..."
    compat_sleep(1)
    _resp = Proxy.NewIdentity
    compat_sleep(1)
    if '250 OK' in _resp:  # TOR control-protocol success status line
        print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: request was successfull."
        Proxy.ConfigureProxy
    else:
        print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: request was unsuccessfull."
    # Re-query the echo service; a different IP means the new circuit works.
    r = compat_get('http://my-ip.herokuapp.com/')
    resp = r.text
    sp = resp.replace('\n','')
    proxy_ip = (((sp.split(':')[-1]).replace('}','')).replace('"','')).replace(' ','')
    if default_ip != proxy_ip:
        print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: proxy configured successfully."
        print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: network traffic will go through : (%s)" % (proxy_ip)
    else:
        print compat_color.fg + compat_color.sn + "["+compat_strftime("%H:%M:%S")+"] [INFO] TOR: proxy configuration is failed"
def handle(self, client):
    """Spawn a daemon proxy process for *client* and log the hand-off."""
    worker = proxy.Proxy(client, SWProxyCallback())
    worker.daemon = True
    worker.start()
    msg = 'Started process {} to handle connection {}'.format(
        worker, client.conn)
    logger.debug(msg)
def handle(self, client):
    """Launch a daemon proxy process to service the incoming *client*."""
    worker = proxy.Proxy(client, ProxyCallback())
    worker.daemon = True
    worker.start()
    logger.debug(
        'Started process %r to handle connection %r' % (worker, client.conn))
def get_gonglve(self):
    """Walk the city_info file and fetch each city's guide ("gonglve") page.

    For every line of ``city_info`` the last tab-separated field is used as
    the destination id; the guide HTML is fetched through a proxy and
    handed to ``self.get_poi``.
    """
    proxy_new = proxy.Proxy()
    _proxie = proxy_new.getProxyByTxt()
    _html = None
    # Read the city_info records (written elsewhere by the crawler).
    for lines in codecs.open('city_info', 'r', encoding='utf-8'):
        status_ma = lines.split('\t')[-1]
        try_times = 5
        while try_times > 0:
            try:
                url = os.path.join(
                    os.path.join(self.gonglve_url,
                                 unicode(str(status_ma).strip('\n'))),
                    "gonglve.html")
                res = requests.get(url=url, proxies=_proxie, timeout=5,
                                   headers=self.headers)
                print res.status_code, status_ma, url
                if res.status_code == status_code_not_found:
                    # NOTE(review): status_ma is a str from split(), so
                    # "+= 1" raises TypeError here -- verify intent.
                    status_ma += 1
                    continue
                if res.status_code == status_code_forbid:
                    # Forbidden: retry the same id (proxy likely blocked).
                    continue
                self.get_poi(html=res.text, iMddid=status_ma)
                break
            except Exception, e:
                print "出现问题 line", LINE.f_lineno, "错误类型", e
                print '尝试倒计次数:', try_times
                try_times -= 1
                pass
def compat_proxy_connect(self):
    """Route traffic through the TOR proxy and verify the exit IP changed.

    Records the direct IP from an external echo service, configures the
    proxy, then compares against the IP seen through the proxy.
    """
    Proxy = proxy.Proxy()
    # NOTE(review): attribute access without a call -- presumably a
    # property with side effects; confirm against the proxy module.
    Proxy.SetDefaultProxy
    r = compat_get('http://my-ip.herokuapp.com/')
    resp = r.text
    sp = resp.replace('\n', '')
    # Crude JSON scraping: value after the last ':' with '}', quotes and
    # spaces stripped.
    default_ip = (((sp.split(':')[-1]).replace('}', '')).replace(
        '"', '')).replace(' ', '')
    print compat_color.fg + compat_color.sn + "[" + compat_strftime(
        "%H:%M:%S") + "] [INFO] TOR: configuring tor proxy "
    Proxy.ConfigureProxy
    try:
        r = compat_get('http://my-ip.herokuapp.com/')
    except:  # NOTE(review): bare except keeps going with the stale response
        print compat_color.fr + compat_color.sn + "[" + compat_strftime(
            "%H:%M:%S"
        ) + "] [INFO] TOR: proxy connection error, make sure tor services are running..."
        Proxy.SetDefaultProxy
    resp = r.text
    sp = resp.replace('\n', '')
    proxy_ip = (((sp.split(':')[-1]).replace('}', '')).replace(
        '"', '')).replace(' ', '')
    if default_ip != proxy_ip:
        print compat_color.fg + compat_color.sd + "[" + compat_strftime(
            "%H:%M:%S") + "] [INFO] TOR: proxy configured successfully."
        print compat_color.fg + compat_color.sd + "[" + compat_strftime(
            "%H:%M:%S"
        ) + "] [INFO] TOR: network traffic will go through : (%s)\n" % (
            proxy_ip)
    else:
        print compat_color.fr + compat_color.sb + "[" + compat_strftime(
            "%H:%M:%S") + "] [INFO] TOR: proxy configuration is failed\n"
def secure_proxy_url(monkeypatch, tls_certificate_pem_path):
    """Return the URL of an instance of a running secure proxy.

    This fixture also spawns that instance and tears it down after the
    test.  The AcceptorPool is patched so the ephemeral port chosen by
    the kernel can be read back from the bound socket.
    """
    proxypy_args = [
        "--threadless",  # use asyncio
        "--num-workers", "1",  # the tests only send one query anyway
        "--hostname", "127.0.0.1",  # network interface to listen to
        "--port", 0,  # ephemeral port, so that kernel allocates a free one
        "--cert-file", tls_certificate_pem_path,  # contains both key and cert
        "--key-file", tls_certificate_pem_path,  # contains both key and cert
    ]

    class PatchedAccetorPool(proxy.core.acceptor.AcceptorPool):
        # Records the actual host/port after bind() so the test can connect.
        def listen(self):
            super().listen()
            self.socket_host, self.socket_port = self.socket.getsockname()[:2]

    monkeypatch.setattr(proxy.proxy, "AcceptorPool", PatchedAccetorPool)
    with proxy.Proxy(input_args=proxypy_args) as proxy_instance:
        yield URL.build(
            scheme="https",
            host=proxy_instance.acceptors.socket_host,
            port=proxy_instance.acceptors.socket_port,
        )
def run(self):
    """Configure the Tor proxy, then run the socks-proxy spider."""
    print("启动主程序")
    tor_proxy = proxy.Proxy(self._useproxy)
    tor_proxy.ConfigureTor  # bare attribute access, reproduced as in the original
    print('爬取代理开始运行')
    spider = Spider_Socks(self._useproxy)
    spider.run()
    print('继续运行Tor')
def main(setup, error):
    """Start one forwarding Proxy per line of the *setup* file.

    Each setup line is "dest_host dest_port listen_port"; stderr is
    redirected (append mode) to *error*.  The double lock.acquire() at
    the end deadlocks on purpose to keep the main thread alive forever.
    """
    sys.stderr = file(error, 'a')
    for line in file(setup):
        parts = line.split()
        # Listen on all interfaces at parts[2]; forward to (parts[0], parts[1]).
        proxy.Proxy(('', int(parts[2])), (parts[0], int(parts[1]))).start()
    lock = thread.allocate_lock()
    lock.acquire()
    lock.acquire()  # blocks forever: second acquire on an already-held lock
def run_proxy():
    """Run the proxy hub: start it, then clean up once start() returns."""
    log = getLogger("proxy")
    hub = proxy.Proxy()
    log.info("Starting proxy service")
    hub.start()
    log.info("Stopping proxy service")
    hub.cleanup()
def Deserialize(self, fin):
    """Read an object reference from *fin* and resolve it.

    References originating from this server resolve through its object
    map; remote ones are wrapped in a Proxy around a RemoteReference.

    Raises ValueError when a local OID is unknown.
    """
    obj_id = serialize.LongSerializer.Deserialize(fin)
    origin = tuple(serialize.SequenceSerializer.Deserialize(fin))
    if origin != self.srv.addr:
        client = self.srv.GetClient(origin)
        return proxy.Proxy(RemoteReference(self.srv, client, obj_id))
    try:
        return self.srv.omap[obj_id]
    except KeyError:
        raise ValueError('Bad OID in serialized data')
def create_file_path(self, img_url, item_id, sub_file_path=None):
    """
    purpose: download an image and write it to a local file path.

    input:  img_url -- sequence whose first element is the image URL
            item_id -- unused here; kept for the caller's interface
            sub_file_path -- optional destination dir (defaults to self.dest_dir)
    output: None; the image is written to disk (files <= 1KB are retried)
    """
    file_name = ''
    rest_img_url = ''
    try_times = 5
    filesize = 1024
    while try_times > 0:
        try:
            rest_img_url = unicode(img_url[0])
            segs = urlparse.urlparse(img_url[0])
            # Force an https scheme onto protocol-relative URLs.
            if rest_img_url.find("http") == -1:
                rest_img_url = 'https://' + rest_img_url.lstrip('/')
            img_name = rest_img_url.split("/")[-1]
            print "解析的名称为:", img_name
            file_path = os.path.join(
                self.dest_dir if sub_file_path is None else sub_file_path,
                img_name)
            print file_path
            if os.path.exists(file_path):
                # Already downloaded: skip unless the file is suspiciously small.
                filesize = os.path.getsize(file_path)
                if filesize > 1024:
                    print '已经存在该文件:', file_path
                    return
            path = '/'.join(file_path.split('/')[:-1])
            isExists = os.path.exists(path)
            if not isExists:
                os.makedirs(path)
            proxy_new = proxy.Proxy()
            _proxie = proxy_new.getProxyByTxt()
            r = requests.get(rest_img_url, proxies=_proxie,
                             headers=self.headers, timeout=50)
            import codecs
            with codecs.open(file_path, "wb") as code:
                code.write(r.content)
            filesize = os.path.getsize(file_path)
            if filesize < 1024:
                # Treat tiny files as failed downloads and retry.
                print '图片为空尝试第', try_times, '次下载'
                try_times -= 1
                continue
            break
        except Exception, e:
            print '执行异常', file_path, rest_img_url, e
            print '尝试第', try_times, '次下载'
            try_times -= 1
            # os.remove(file_name)
            pass
def mainloop(self, interactive=False, namespace=globals()): '''Starts main loop ''' # Start timers for i in range(len(self._timer_stack)): def func(index): handler, fps = self._timer_stack[index] t = glut.glutGet(glut.GLUT_ELAPSED_TIME) dt = (t - self._timer_date[index])/1000.0 self._timer_date[index] = t handler(dt) glut.glutTimerFunc(int(1000./fps), func, index) self._timer_date[index] = glut.glutGet(glut.GLUT_ELAPSED_TIME) fps = self._timer_stack[i][1] glut.glutTimerFunc(int(1000./fps), func, i) # Start idle only if necessary for item in self._event_stack: if 'on_idle' in item.keys(): glut.glutIdleFunc(self._idle) self.dispatch_event('on_init') # Starts non-interactive mode if not interactive: glut.glutMainLoop() sys.exit() # Starts interactive mode # Save tty mode on linux/darwin if sys.platform in ['linux2', 'darwin']: self.term_state = termios.tcgetattr(sys.stdin) namespace = namespace.copy() for key in namespace.keys(): f = namespace[key] if key[:2] == 'gl' and isinstance(namespace[key], _ctypes.CFuncPtr): namespace[key] = proxy.Proxy(f,self) def session_start(): self.shell = IPython.ipapi.make_session(namespace) self.shell.IP.interact() #mainloop() sys.exit() self.session = threading.Thread(target=session_start) self.session.start() @atexit.register def goodbye(): self.shell.IP.ask_exit() # Restore tty state on linux/darwin if sys.platform in ['linux2', 'darwin']: termios.tcsetattr(sys.stdin, termios.TCSADRAIN, self.term_state) sys.stdout.write('\n') glut.glutTimerFunc(100, self._pop, 0) glut.glutMainLoop()
def advapi32_regdeletevaluea_handler(hookcall):
    """Hook handler for RegDeleteValueA: log its arguments and ACK the call."""
    print_debug("advapi32.dll.RegDeleteValueA() called!")
    mem = proxy.Proxy()
    has_name = hookcall.params[1] != 0
    if has_name:
        valuename = mem.readasciiz(hookcall.params[1])
        print_debug("valuename" + valuename)
    logf("-- advapi32.dll.RegDeleteValueA( %Xh, %Xh )\r\n"
         % (hookcall.params[0], hookcall.params[1]))
    if has_name:
        logf("valuename: " + valuename + "\r\n")
    hookcall.sendack()
def advapi32_regsetvalueexa_handler(hookcall):
    """Hook handler for RegSetValueExA: log the value name and args, then ACK."""
    print_debug("advapi32.dll.RegSetValueExa() called!")
    mem = proxy.Proxy()
    has_name = hookcall.params[1] != 0
    if has_name:
        valuename = mem.readasciiz(hookcall.params[1])
        print_debug("valuename: " + valuename)
    logf("-- advapi32.dll.RegSetValueExa( %Xh, %Xh, %Xh, %Xh, %Xh, %Xh)\r\n"
         % (hookcall.params[0], hookcall.params[1], hookcall.params[2],
            hookcall.params[3], hookcall.params[4], hookcall.params[5]))
    if has_name:
        logf("valuename: " + valuename + "\r\n")
    hookcall.sendack()
def main():
    """Build the BMS proxy from CONFIG_FILE and launch its simulators.

    Configuration/import/driver errors are reported to stdout and abort
    the start-up instead of propagating.
    """
    known_errors = (
        errors.BaseException,
        errors.ConfigError,
        errors.ModuleImportError,
        errors.DriverError,
    )
    try:
        bms = proxy.Proxy(CONFIG_FILE)
    except known_errors as e:
        print(f'Error: {e}')
        return
    bms.start_simulators()
def sendto_handler(hookcall):
    """Hook handler for sendto(): let the user hex-edit the outgoing packet.

    If the edited packet keeps its size the buffer is patched in place; if
    it grew, new memory is allocated in the target process and the stack
    arguments (buffer pointer and length) are rewritten.
    """
    global allocated_buffers
    myproxy = proxy.Proxy()
    # free previously allocated buffers, if they exists
    for addrs in allocated_buffers:
        print "freeing address %x" % addrs
        myproxy.freememory(addrs)
    #empty list of addresses to free
    allocated_buffers = []
    # sendto(SOCKET s, const char *buf, int len, int flags,
    #        const sockaddr *to, int tolen)
    p_socket = hookcall.params[0]
    p_data = hookcall.params[1]
    p_datasize = hookcall.params[2]
    p_flags = hookcall.params[3]
    p_pto = hookcall.params[4]
    p_tolen = hookcall.params[5]
    print "Socket: %x" % p_socket
    print "DataSize %xh (%d)" % (p_datasize, p_datasize)
    data = myproxy.readmemory(p_data, p_datasize)
    (new_lendata, new_data) = runHookHexEditor(len(data), data)
    if new_lendata == p_datasize:
        # Same size: overwrite the original buffer directly.
        myproxy.writememory(p_data, new_lendata, new_data)
    else:
        print "modified packet is bigger than original one!"
        print "allocating memory on intercepted process for new buffer.."
        print "NewSize: %X (%d)" % (new_lendata, new_lendata)
        (retcode, addr) = myproxy.allocmemory(new_lendata)
        if retcode == 1:
            print "retcode: %d" % retcode
            print "addr: %X" % addr
            myproxy.writememory(addr, new_lendata, new_data)
            pesp = hookcall.regs['esp']
            print "ESP of process: %X" % pesp
            print "Changing ptr to buff and len parameters on stack.."
            # Patch the 2nd (buf, esp+8) and 3rd (len, esp+0xC) arguments.
            myproxy.writememory(pesp + 8, 4, struct.pack("L", addr))
            myproxy.writememory(pesp + 0xC, 4, struct.pack("L", new_lendata))
            # add the allocated buffer to our list to free it later
            allocated_buffers.append(addr)
        else:
            print "allocation failed!"
    print "done!"
    hookcall.sendack()
    return
def __init__(self, totalPageNum=1, image_path='/data2/xijun.gong/jd_image_data'):
    """Set up the image downloader.

    :param totalPageNum: number of pages to download
    :param image_path: directory the images are written to
    """
    self.image_path = image_path
    self.download = download
    self.proxy_new = proxy.Proxy()
    # Total number of images to fetch, counted from the starting offset.
    self.__amount = totalPageNum * maxImageNum + self.__start_amount
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.94 Safari/537.36',
        'Qunar-App': 'SQxTLo6t4k5HSnsykL7nuz9jL/2FvrM9QfppVEbOhQYxIS5tR6I/w3GIq9wpZLbur3Hw7W//Ec+nFnorxB7gTlSSND1Xrbaj3zmRkWAZUaiRm+djpINDhvsYOXlFZHlrQ0BPZ+uZRIn5xnSAfPTpW1xJehqHDr1769Xs0Ly8rZM='
    }
def entry(self):
    """
    Entry point of the HTML scan: crawl destination pages starting at a
    hard-coded id and append each parsed city record to ``city_info``.

    :return: never returns normally (infinite crawl loop)
    """
    proxy_new = proxy.Proxy()
    _proxie = proxy_new.getProxyByTxt()
    _html = None
    # Resume point for the destination-id scan.
    status_ma = 26679
    try_times = 10
    while True:
        try:
            if try_times < 1:
                # Out of retries for this id: give up and move to the next.
                try_times = 10
                print '放弃 ', status_ma, ' 字段进入下一次字段更新'
                status_ma += 1
            url = os.path.join(self.root_url, str(status_ma) + ".html")
            res = requests.get(url=url, proxies=_proxie, timeout=5,
                               headers=self.headers)
            print res.status_code, status_ma, url
            if res.status_code == status_code_not_found:
                status_ma += 1
                try_times = 10
                continue
            if res.status_code == status_code_forbid:
                # Forbidden: retry the same id (proxy likely blocked).
                continue
            city_info = self.get_travel_scenic_spot(html=res.text)
            if city_info is None:
                print "出现问题的url:", url, "出现问题的原因:", "没有获取到city_info的相关信息"
                status_ma += 1
                try_times = 10
                continue
            print 'city_info:', city_info
            fp = codecs.open("city_info", 'a+', encoding='utf-8')
            fp.writelines(city_info)  # append the record to the text file
            fp.writelines("\t")
            fp.writelines(str(status_ma))
            fp.writelines("\n")
            fp.close()
            status_ma += 1
            try_times = 10
        except Exception, e:
            print "出现问题 line", LINE.f_lineno, "错误类型", e
            print '倒计时次数:', try_times
            try_times -= 1
            # Rotate to a fresh proxy after a failure.
            _proxie = proxy_new.getProxyByTxt()
            pass
def advapi32_regopenkeyexa_handler(hookcall):
    """Hook handler for RegOpenKeyExA: dump its arguments to the log and ACK.

    Reads the lpSubKey string (param 1) out of the hooked process when the
    pointer is non-NULL.
    """
    print_debug("advapi32.lib.RegOpenKeyExA() called!")
    myproxy = proxy.Proxy()
    if hookcall.params[1] != 0:
        buffer = myproxy.readasciiz(hookcall.params[1])
        print_debug("subkey: " + buffer)
    logf(
        "-- advapi32.lib.RegOpenKeyExA( %Xh, %Xh, %Xh, %Xh, %Xh) ---------------------------\r\n"
        % (hookcall.params[0], hookcall.params[1], hookcall.params[2],
           hookcall.params[3], hookcall.params[4]))
    # "!=" replaces the legacy "<>" operator for consistency with the
    # check above (and Python 3 compatibility).
    if hookcall.params[1] != 0:
        logf("subkey: " + buffer + "\r\n")
    hookcall.sendack()
def entry_point() -> None:
    """Run proxy.py with the embedded web server and our custom plugins."""
    plugin_names = [
        'app.plugins.MyWebServerPlugin',
        'app.plugins.MyProxyPlugin',
    ]
    # NOTE: Pass plugins via *args if you define custom flags.
    # Currently plugins passed via **kwargs are not discovered for
    # custom flags by proxy.py
    #
    # See https://github.com/abhinavsingh/proxy.py/issues/871
    with proxy.Proxy(enable_web_server=True, port=9000,
                     plugins=plugin_names) as _:
        proxy.sleep_loop()
def onRead(self, ioLoop):
    """Accept an incoming connection, handshake it, and register a proxy."""
    conn, peer = self.sock.accept()
    srv_log = logger.getLogger('Server')
    # Enforce the connection cap before doing any protocol work.
    if self.connNum >= self.maxConnNum:
        conn.close()
        return
    if not self.handshake(conn):
        srv_log.info(
            'client %s:%s connect but is not websocket protocol.', *peer)
        return
    self.connNum += 1
    ioLoop.addEvent(proxy.Proxy(conn), ioLoop.E_READ)
    srv_log.info(
        'websocket client %s:%s is connect. server is gone.', *peer)
def advapi32_regqueryvaluea_handler(hookcall):
    """Hook handler for RegQueryValueA: log subkey/value strings and ACK."""
    print_debug("advapi32.dll.RegQueryValueA() called!")
    mem = proxy.Proxy()
    has_subkey = hookcall.params[1] != 0
    has_value = hookcall.params[2] != 0
    if has_subkey:
        subkey = mem.readasciiz(hookcall.params[1])
        print_debug("subkey: " + subkey)
    if has_value:
        value = mem.readasciiz(hookcall.params[2])
        print_debug("value: " + value)
    logf("-- advapi32.dllRegQueryValueA( %Xh, %Xh, %Xh, %Xh )\r\n"
         % (hookcall.params[0], hookcall.params[1],
            hookcall.params[2], hookcall.params[3]))
    if has_subkey:
        logf("subkey" + subkey + "\r\n")
    if has_value:
        logf("value" + value + "\r\n")
    hookcall.sendack()
def start(benchmark=False):
    """Configure and start the proxy front/back layers and the Tornado app.

    :param benchmark: when True, also start a ResourceMonitor that logs
        the connected-user count to a CSV file.
    """
    # Clients and servers connect to the Proxy through different URLs
    logging.config.fileConfig("proxy_logging.conf")
    tornado.options.parse_command_line()
    """
    TODO: As new options are available, parse them and standardize the
    options dictionary. Current options are:
    DISTRIBUTION:
    Description: Defines how messages are distributed from proxy to app servers.
    Options: Round-robin or sticky (messages from a client always hit the
    same app server)
    """
    proxy = proxymod.Proxy()
    proxy_options = {}
    proxy_options["DISTRIBUTION"] = PROXY_DISTRIBUTION.STICKY
    logging.debug("[proxy]: Loading proxy, please wait..")
    # Wire up the client-facing (front) and server-facing (back) layers.
    proxy.front = front.ClientLayer(proxy, proxy_options)
    proxy.back = back.ServerLayer(proxy, proxy_options)
    proxy.port = options.port
    logging.info("[proxy]: Proxy Started!")
    static_path = os.path.join("..", os.path.join("bin", "static"))
    logging.info("[proxy]: static path is " + static_path)
    if benchmark:
        # Periodic CSV dump of the front layer's user count.
        filename = "proxy_resmon_" + str(uuid.uuid4())[:8] + ".csv"
        resmon = ResourceMonitor(filename,
                                 metrics=[('numUsers',
                                           proxy.front.get_num_users)])
        resmon.start()
    application = tornado.web.Application([(r"/", front.HTTPHandler),
                                           (r"/static/(.*)",
                                            tornado.web.StaticFileHandler,
                                            {
                                                'path': static_path
                                            }),
                                           (r"/client", front.ClientHandler),
                                           (r"/server", back.ServerHandler),
                                           (r"/admin", back.AdminHandler)])
    application.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
def scrape(get_global_proxy_type_list=False):
    """Scrape proxies of several types from the site.

    :param get_global_proxy_type_list: when True, re-wrap the scraped
        entries as plain ``proxy.Proxy`` objects (ip/port/type only).
    :return: list of proxy objects (possibly empty if every request failed)
    """
    hidden_post_data_tuple = extract_hidden_post_data()
    proxy_types = ['https', 'socks4', 'socks5']
    xpath_selectors = ['//*[@class="proxyListOdd"]',
                       '//*[@class="proxyListEven"]']
    scraped_proxy_list = []
    for proxy_type in proxy_types:
        try:
            r = make_request(proxy_type, False, hidden_post_data_tuple)
            scraped_proxy_list.extend(
                get_proxy_list_from_response(r.text, proxy_type,
                                             *xpath_selectors))
        except Exception:
            # Best-effort: a failure for one proxy type must not abort the
            # others (unused exception binding removed).
            pass
    if get_global_proxy_type_list:
        # Comprehension replaces the original manual append loop.
        return [proxy.Proxy(ip=p.ip, port=p.port, proxy_type=p.proxy_type)
                for p in scraped_proxy_list]
    return scraped_proxy_list
def advapi32_regenumvaluea_handler(hookcall):
    """Hook handler for RegEnumValueA: log the two name buffers and ACK."""
    print_debug("advapi32.dll.RegEnumValueA() called!")
    mem = proxy.Proxy()
    has_name = hookcall.params[2] != 0
    has_name2 = hookcall.params[3] != 0
    if has_name:
        valuename = mem.readasciiz(hookcall.params[2])
        print_debug("valuename: " + valuename)
    if has_name2:
        valuename2 = mem.readasciiz(hookcall.params[3])
        print_debug("valuename2: " + valuename2)
    logf(
        "-- advapi32.dll.RegEnumValueA( %Xh, %Xh, %Xh, %Xh, %Xh, %Xh, %Xh, %Xh)\r\n"
        % (hookcall.params[0], hookcall.params[1], hookcall.params[2],
           hookcall.params[3], hookcall.params[4], hookcall.params[5],
           hookcall.params[6], hookcall.params[7]))
    if has_name:
        logf("valuename: " + valuename + "\r\n")
    if has_name2:
        logf("valuename2: " + valuename2 + "\r\n")
    hookcall.sendack()
t.start() # Set and start control thread controller = control.Control(proxydb=proxies, sharestats=shares) controller.listen_ip = args.control controller.listen_port = args.control_port controller.poolmap['pool'] = args.pool controller.poolmap['port'] = args.port controller.poolmap['user'] = args.username controller.poolmap['pass'] = args.password t = threading.Thread(target=controller.start, args=[]) t.daemon = True t.start() # Start listening for incoming connections server_listen = connection.Server(args.listen, args.listen_port) while not shutdown: # Wait for client connection miner = server_listen.listen() pool_connection = connection.Client(controller.poolmap['pool'], controller.poolmap['port']) pool = pool_connection.connect() proxy = Proxy.Proxy(pool, sharestats=shares) proxy.set_auth(controller.poolmap['user'], controller.poolmap['pass']) proxy.add_miner(miner) t = threading.Thread(target=proxy.start, args=[]) t.daemon = True t.start() proxies.add_proxy(proxy, t)
import web_app
import proxy

# Listen on all interfaces and forward traffic to 192.168.178.54:54897.
hproxy = proxy.Proxy('0.0.0.0', '192.168.178.54', 54897)
hproxy.start()