def request(headers=None, payload=None):
    """Send a request to GAE over a pooled SSL connection, retrying up to 3 times.

    headers: optional dict of HTTP headers; the 'Host' header is filled in
        from the appid of the connection actually used.
    payload: optional request body passed through to _request().
    Returns the response object (with .ssl_sock attached) on success.
    Raises GAE_Exception when no appid is available or every retry fails.
    """
    # BUG FIX: the original used a mutable default (headers={}); because this
    # function writes headers['Host'], the default dict was shared and
    # mutated across calls.  Use None as the default instead.
    if headers is None:
        headers = {}

    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                # fresh connection: bind it to an appid
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()

    raise GAE_Exception(2, "try max times")
def load_ip_range(self):
    """Rebuild the candidate ip range map/list/index from the range source."""
    self.ip_range_map = {}
    self.ip_range_list = []
    self.ip_range_index = []
    self.candidate_amount_ip = 0

    for raw_line in self.load_range_content().splitlines():
        # skip blanks and comment lines
        if not raw_line or raw_line.startswith('#'):
            continue

        try:
            begin_str, end_str = ip_utils.split_ip(raw_line)
            range_start = ip_utils.ip_string_to_num(begin_str)
            range_end = ip_utils.ip_string_to_num(end_str)
            if not range_start or not range_end or range_end < range_start:
                xlog.warn("load ip range:%s fail", raw_line)
                continue
        except Exception as e:
            xlog.exception("load ip range:%s fail:%r", raw_line, e)
            continue

        self.ip_range_map[self.candidate_amount_ip] = [range_start, range_end]
        self.ip_range_list.append([range_start, range_end])
        self.ip_range_index.append(self.candidate_amount_ip)
        # print ip_utils.ip_num_to_string(range_start), ip_utils.ip_num_to_string(range_end)
        self.candidate_amount_ip += range_end - range_start

    self.ip_range_index.sort()
def report_connect_fail(self, ip_str, force_remove=False):
    # Record a failed connection for ip_str: bump its failure counters and
    # handshake penalty, and drop the ip entirely after 50 failures
    # (or immediately when force_remove is True).
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if not ip_str in self.ip_dict:
            return

        self.ip_dict[ip_str]["links"] -= 1

        # ignore if system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                # connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            # debounce: failures within one second are counted once
            xlog.debug("fail time too near")
            return

        # increase handshake_time to make it can be used in lower probability
        self.ip_dict[ip_str]["handshake_time"] += 300

        if self.ip_dict[ip_str]["fail_times"] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]["fail_times"] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]["fail_times"] >= 50:
            property = self.ip_dict[ip_str]
            server = property["server"]
            del self.ip_dict[ip_str]

            if "gws" in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                # queue the ip so a background thread can re-test it later
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info(
                    "remove ip tmp:%s left amount:%d gws_num:%d",
                    ip_str, len(self.ip_dict), len(self.gws_ip_list)
                )
            else:
                xlog.info(
                    "remove ip:%s left amount:%d gws_num:%d",
                    ip_str, len(self.ip_dict), len(self.gws_ip_list)
                )

            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    # lock released before this potentially slow call
    if not self.is_ip_enough():
        self.search_more_google_ip()
def remove_slowest_ip(self):
    """Trim the gws ip list down to max_good_ip_num by dropping the
    slowest-handshaking entries (the sorted list's tail)."""
    if len(self.gws_ip_list) <= self.max_good_ip_num:
        return

    self.try_sort_ip_by_handshake_time(force=True)

    self.ip_lock.acquire()
    try:
        remaining = len(self.gws_ip_list)
        while remaining > self.max_good_ip_num:
            # slowest entry sits at the end of the sorted list
            slow_ip = self.gws_ip_list[remaining - 1]
            info = self.ip_dict[slow_ip]
            server_kind = info['server']
            xlog.info("remove_slowest_ip:%s handshake_time:%d",
                      slow_ip, info['handshake_time'])
            del self.ip_dict[slow_ip]
            if 'gws' in server_kind and slow_ip in self.gws_ip_list:
                self.gws_ip_list.remove(slow_ip)
            remaining -= 1
    except Exception as e:
        xlog.exception("remove_slowest_ip err:%s", e)
    finally:
        self.ip_lock.release()
def _request(self, method, host, path="/", headers=None, data="", timeout=40):
    """Issue one request through the http dispatcher.

    Returns (content, status, response); on any failure returns ("", 500, {}).
    """
    try:
        response = self.http_dispatcher.request(
            method, host, path, dict(headers or {}), data, timeout=timeout)
        # BUG FIX: guard against a None/empty response from the dispatcher.
        # The sibling implementation already does this; without the check,
        # the attribute access below raised AttributeError that the broad
        # except silently converted into a 500.
        if not response:
            return "", 500, {}

        status = response.status
        if status != 200:
            xlog.warn("front request %s %s%s fail, status:%d",
                      method, host, path, status)

        content = response.task.read_all()
        # xlog.debug("%s %s%s trace:%s", method, response.ssl_sock.host, path, response.task.get_trace())
        return content, status, response
    except Exception as e:
        xlog.exception("front request %s %s%s fail:%r", method, host, path, e)
        return "", 500, {}
def load_ip(self):
    """Load the good-ip list from disk and feed every entry to add_ip()."""
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file

    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            if line.startswith("#"):
                continue  # comment line

            fields = line.split(" ")
            if len(fields) < 4:
                xlog.warning("line err: %s", line)
                continue

            ip_str = fields[0]
            domain = fields[1]
            server = fields[2]
            handshake_time = int(fields[3])
            # the failure-count column is optional in older files
            fail_times = int(fields[4]) if len(fields) > 4 else 0

            # logging.info("load ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server)
            self.add_ip(ip_str, handshake_time, domain, server, fail_times)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d",
              len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_gws_ip(force=True)
def test_app_head(ssl_sock, ip):
    """Probe the appengine app over an established SSL socket.

    Returns True when the app answers 200 with a body mentioning GoAgent,
    False on any failure.
    """
    appid = appid_manager.get_appid()
    request_data = 'GET / HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n' % appid

    time_start = time.time()
    ssl_sock.send(request_data.encode())
    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    try:
        response.begin()
        if response.status != 200:
            xlog.debug("app check %s status:%d", ip, response.status)
            raise Exception("app check fail")

        body = response.read()
        if "GoAgent" not in body:
            xlog.debug("app check %s content:%s", ip, body)
            raise Exception("content fail")
    except Exception as e:
        xlog.exception("test_app_head except:%r", e)
        return False
    finally:
        response.close()

    time_cost = (time.time() - time_start) * 1000
    xlog.debug("app check time:%d", time_cost)
    return True
def add_ip(self, ip_str, handshake_time, domain=None, server='', fail_times=0):
    """Insert a new ip record, or refresh the stats of a known one.

    Returns True only when a brand-new entry was created; False when the
    ip was already known, the input was invalid, or an error occurred.
    """
    if not isinstance(ip_str, basestring):
        xlog.error("add_ip input")
        # BUG FIX: previously execution fell through after logging, so the
        # non-string key was inserted anyway; bail out like the newer
        # implementation of this method does.
        return False

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        if ip_str in self.ip_dict:
            # known ip: refresh timing/failure stats only
            self.ip_dict[ip_str]['handshake_time'] = handshake_time
            self.ip_dict[ip_str]['fail_times'] = fail_times
            self.ip_dict[ip_str]['fail_time'] = 0
            self.ip_dict[ip_str]['history'].append([time.time(), handshake_time])
            return False

        self.iplist_need_save = 1

        self.ip_dict[ip_str] = {'handshake_time': handshake_time,
                                "fail_times": fail_times,
                                "transfered_data": 0,
                                'data_active': 0,
                                'domain': domain,
                                'server': server,
                                "history": [[time.time(), handshake_time]],
                                "fail_time": 0,
                                "success_time": 0,
                                "get_time": 0}

        if 'gws' in server:
            self.gws_ip_list.append(ip_str)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()

    return False
def remove_slowest_ip(self):
    """Drop the slowest gws ips until the list fits max_good_ip_num."""
    if len(self.gws_ip_list) <= self.max_good_ip_num:
        return

    self.try_sort_gws_ip(force=True)

    self.ip_lock.acquire()
    try:
        count = len(self.gws_ip_list)
        while count > self.max_good_ip_num:
            # sorted fastest-first, so the victim is the last entry
            victim = self.gws_ip_list[count - 1]
            record = self.ip_dict[victim]
            xlog.info("remove_slowest_ip:%s handshake_time:%d, fails:%d",
                      victim, record["handshake_time"], record["fail_times"])
            server_kind = record["server"]
            del self.ip_dict[victim]
            if "gws" in server_kind and victim in self.gws_ip_list:
                self.gws_ip_list.remove(victim)
            count -= 1
    except Exception as e:
        xlog.exception("remove_slowest_ip err:%s", e)
    finally:
        self.ip_lock.release()
def load_ip(self):
    """Read the good-ip file (or the bundled default) and register each ip."""
    file_path = (self.good_ip_file if os.path.isfile(self.good_ip_file)
                 else self.default_good_ip_file)

    with open(file_path, "r") as fd:
        all_lines = fd.readlines()

    for line in all_lines:
        try:
            parts = line.split(' ')
            if len(parts) < 4:
                xlog.warning("line err: %s", line)
                continue

            ip_str = parts[0]
            domain = parts[1]
            server = parts[2]
            handshake_time = int(parts[3])
            # optional fifth column carries the failure count
            fail_times = int(parts[4]) if len(parts) > 4 else 0

            #logging.info("load ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server)
            self.add_ip(ip_str, handshake_time, domain, server, fail_times)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d",
              len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_gws_ip(force=True)
def check_all_exist_ip(): good_ip_file_name = "good_ip.txt" good_ip_file = os.path.abspath( os.path.join(config.DATA_PATH, good_ip_file_name)) if not os.path.isfile(good_ip_file): print "open file ", good_ip_file_name, " fail." return with open(good_ip_file, "r") as fd: lines = fd.readlines() for line in lines: try: str_l = line.split(' ') if len(str_l) != 4: xlog.warning("line err: %s", line) continue ip_str = str_l[0] domain = str_l[1] server = str_l[2] handshake_time = int(str_l[3]) xlog.info("test ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server) #test_with_app(ip_str) test_gws(ip_str) #self.add_ip(ip_str, handshake_time, domain, server) except Exception as e: xlog.exception("load_ip line:%s err:%s", line, e)
def load_ip_range(self):
    """Reset and repopulate the candidate ip-range tables."""
    self.ip_range_map = {}
    self.ip_range_list = []
    self.ip_range_index = []
    self.candidate_amount_ip = 0

    content = self.load_range_content()
    for entry in content.splitlines():
        if len(entry) == 0 or entry[0] == '#':
            continue  # blank or comment line

        try:
            first, last = ip_utils.split_ip(entry)
            lo = ip_utils.ip_string_to_num(first)
            hi = ip_utils.ip_string_to_num(last)
            if not lo or not hi or hi < lo:
                xlog.warn("load ip range:%s fail", entry)
                continue
        except Exception as e:
            xlog.exception("load ip range:%s fail:%r", entry, e)
            continue

        index = self.candidate_amount_ip
        self.ip_range_map[index] = [lo, hi]
        self.ip_range_list.append([lo, hi])
        self.ip_range_index.append(index)
        # print ip_utils.ip_num_to_string(lo), ip_utils.ip_num_to_string(hi), hi - lo
        self.candidate_amount_ip = index + (hi - lo)

    self.ip_range_index.sort()
def test_app_head(ssl_sock, ip):
    """Send a bare GET to the appid host and verify a GoAgent 200 reply.

    Returns True on success, False on any check failure.
    """
    request_data = ('GET / HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n'
                    % appid_manager.get_appid())

    started = time.time()
    ssl_sock.send(request_data.encode())
    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    check_ok = True
    try:
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app check %s status:%d", ip, status)
            raise Exception("app check fail")

        content = response.read()
        if "GoAgent" not in content:
            xlog.debug("app check %s content:%s", ip, content)
            raise Exception("content fail")
    except Exception as e:
        xlog.exception("test_app_head except:%r", e)
        check_ok = False
    finally:
        response.close()

    if not check_ok:
        return False

    elapsed_ms = (time.time() - started) * 1000
    xlog.debug("app check time:%d", elapsed_ms)
    return True
def scan_ip_worker(self):
    # Worker-thread loop: keep probing random candidate ips with the gws
    # test and add the ones that pass, until this worker's slot exceeds
    # the configured scan-thread count.
    while self.searching_thread_count <= self.scan_ip_thread_num:
        if not connect_control.allow_scan():
            # scanning currently throttled; back off
            time.sleep(10)
            continue

        try:
            time.sleep(1)
            ip_int = ip_range.get_ip()
            ip_str = ip_utils.ip_num_to_string(ip_int)

            # skip ips previously reported as bad
            if self.is_bad_ip(ip_str):
                continue

            result = check_ip.test_gws(ip_str)
            if not result:
                continue

            if self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type):
                #logging.info("add %s CN:%s type:%s time:%d gws:%d ", ip_str,
                #    result.domain, result.server_type, result.handshake_time, len(self.gws_ip_list))
                xlog.info("scan_ip add ip:%s time:%d", ip_str, result.handshake_time)
                scan_ip_log.info("Add %s time:%d CN:%s type:%s",
                                 ip_str, result.handshake_time, result.domain, result.server_type)
                # keep the pool bounded and persist the new entry
                self.remove_slowest_ip()
                self.save_ip_list()
        except check_ip.HoneypotError as e:
            # the probed ip behaved like a honeypot: blacklist it and back off
            self.report_bad_ip(ip_str)
            connect_control.fall_into_honeypot()
            continue
        except Exception as e:
            xlog.exception("google_ip.runJob fail:%s", e)

    self.ncount_lock.acquire()
    self.searching_thread_count -= 1
    self.ncount_lock.release()
    xlog.info("scan_ip_worker exit")
def scan_ip_worker(self):
    # Worker-thread loop: probe random candidate ips with the GAE check
    # while scanning is allowed and the global keep_running flag is set.
    while self.searching_thread_count <= self.scan_ip_thread_num and connect_control.keep_running:
        if not connect_control.allow_scan():
            # scanning currently throttled; back off
            time.sleep(10)
            continue

        try:
            time.sleep(1)
            ip_int = ip_range.get_ip()
            ip_str = ip_utils.ip_num_to_string(ip_int)

            # already known: nothing to do
            if ip_str in self.ip_dict:
                continue

            # bracket the probe so connection throttling can account for it
            connect_control.start_connect_register()
            result = check_ip.test_gae(ip_str)
            connect_control.end_connect_register()
            if not result:
                continue

            if self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type):
                #logging.info("add %s CN:%s type:%s time:%d gws:%d ", ip_str,
                #    result.domain, result.server_type, result.handshake_time, len(self.gws_ip_list))
                xlog.info("scan_ip add ip:%s time:%d", ip_str, result.handshake_time)
                scan_ip_log.info("Add %s time:%d CN:%s type:%s",
                                 ip_str, result.handshake_time, result.domain, result.server_type)
                # keep the pool bounded and persist the new entry
                self.remove_slowest_ip()
                self.save_ip_list()
        except Exception as e:
            xlog.exception("google_ip.runJob fail:%s", e)

    self.ncount_lock.acquire()
    self.searching_thread_count -= 1
    self.ncount_lock.release()
    xlog.info("scan_ip_worker exit")
def http_request(url, method="GET"):
    """Fire a best-effort local HTTP request; the response body is ignored.

    NOTE: `method` is currently unused -- urllib2's opener always issues a
    GET for a plain URL; the parameter is kept for interface compatibility.
    """
    proxy_handler = urllib2.ProxyHandler({})
    opener = urllib2.build_opener(proxy_handler)
    try:
        req = opener.open(url)
        # BUG FIX: close the response handle; it used to leak.
        req.close()
    except Exception as e:
        xlog.exception("web_control http_request:%s fail:%s", url, e)
    return
def report_connect_fail(self, ip_str, force_remove=False):
    # Penalize ip_str after a failed connection; after 50 failures (or
    # when force_remove is set) the ip is dropped from the pool.
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if not ip_str in self.ip_dict:
            return

        self.ip_dict[ip_str]['links'] -= 1

        # ignore if system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                #connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            # debounce repeated failures reported within one second
            xlog.debug("fail time too near")
            return

        # increase handshake_time to make it can be used in lower probability
        self.ip_dict[ip_str]['handshake_time'] += 300

        if self.ip_dict[ip_str]['fail_times'] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]['fail_times'] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]['fail_times'] >= 50:
            property = self.ip_dict[ip_str]
            server = property['server']
            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                # queue for background re-check before permanent removal
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info("remove ip tmp:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))
            else:
                xlog.info("remove ip:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))

            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    # lock released before this potentially slow call
    if not self.is_ip_enough():
        self.search_more_google_ip()
def check(self, callback=None, check_ca=True, close_ssl=True):
    # Connect to self.ip over SSL, optionally inspect the certificate,
    # then hand the live socket to callback.  Returns the callback result
    # (or True when no callback) on success, False on any failure.
    ssl_sock = None
    try:
        ssl_sock, self.result.connct_time, self.result.handshake_time = connect_ssl(
            self.ip, timeout=self.timeout, openssl_context=self.openssl_context)

        # verify SSL certificate issuer.
        def check_ssl_cert(ssl_sock):
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #raise HoneypotError(' certficate is none')
                raise SSLError("no cert")

            # NOTE(review): issuer_commonname is computed but never used in
            # this variant; the sibling implementation checks it against
            # 'Google' -- confirm whether the check was dropped on purpose.
            issuer_commonname = next(
                (v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            ssl_cert = cert_util.SSLCert(cert)
            xlog.info("%s CN:%s", self.ip, ssl_cert.cn)
            self.result.domain = ssl_cert.cn

        if check_ca:
            check_ssl_cert(ssl_sock)

        if callback:
            return callback(ssl_sock, self.ip)

        return True
    except SSLError as e:
        xlog.debug("Check_appengine %s SSLError:%s", self.ip, e)
        pass
    except IOError as e:
        xlog.warn("Check %s IOError:%s", self.ip, e)
        pass
    except httplib.BadStatusLine:
        #logging.debug('Check_appengine http.bad status line ip:%s', ip)
        #import traceback
        #traceback.print_exc()
        pass
    except Exception as e:
        # fall back to args[0]/message for the errno-style detail
        if len(e.args) > 0:
            errno_str = e.args[0]
        else:
            errno_str = e.message
        xlog.exception('check_appengine %s %s err:%s', self.ip, errno_str, e)
    finally:
        if ssl_sock and close_ssl:
            ssl_sock.close()

    return False
def get_windows_running_process_list():
    """Return a list of Process(pid, name, exe) for running processes.

    Uses the psapi/kernel32 APIs on Windows, /proc on Linux, and falls back
    to psutil elsewhere.  Returns an empty list when nothing could be read.
    """
    import os
    import glob
    import ctypes
    import collections

    Process = collections.namedtuple('Process', 'pid name exe')
    process_list = []

    if os.name == 'nt':
        PROCESS_QUERY_INFORMATION = 0x0400
        PROCESS_VM_READ = 0x0010

        lpidProcess = (ctypes.c_ulong * 1024)()
        cb = ctypes.sizeof(lpidProcess)
        cbNeeded = ctypes.c_ulong()
        ctypes.windll.psapi.EnumProcesses(ctypes.byref(lpidProcess), cb,
                                          ctypes.byref(cbNeeded))
        # BUG FIX: use floor division so the slice bound stays an int under
        # Python 3 (true division returns a float and breaks the slice).
        nReturned = cbNeeded.value // ctypes.sizeof(ctypes.c_ulong())
        pidProcess = [i for i in lpidProcess][:nReturned]
        has_queryimage = hasattr(ctypes.windll.kernel32,
                                 'QueryFullProcessImageNameA')

        for pid in pidProcess:
            hProcess = ctypes.windll.kernel32.OpenProcess(
                PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, 0, pid)
            if hProcess:
                modname = ctypes.create_string_buffer(2048)
                count = ctypes.c_ulong(ctypes.sizeof(modname))
                if has_queryimage:
                    ctypes.windll.kernel32.QueryFullProcessImageNameA(
                        hProcess, 0, ctypes.byref(modname), ctypes.byref(count))
                else:
                    ctypes.windll.psapi.GetModuleFileNameExA(
                        hProcess, 0, ctypes.byref(modname), ctypes.byref(count))
                exe = modname.value
                name = os.path.basename(exe)
                process_list.append(Process(pid=pid, name=name, exe=exe))
                ctypes.windll.kernel32.CloseHandle(hProcess)
    elif sys.platform.startswith('linux'):
        for filename in glob.glob('/proc/[0-9]*/cmdline'):
            pid = int(filename.split('/')[2])
            exe_link = '/proc/%d/exe' % pid
            if os.path.exists(exe_link):
                exe = os.readlink(exe_link)
                name = os.path.basename(exe)
                process_list.append(Process(pid=pid, name=name, exe=exe))
    else:
        try:
            import psutil
            # NOTE(review): psutil.get_process_list() was removed in modern
            # psutil; psutil.process_iter() is the replacement -- confirm
            # the bundled psutil version before upgrading.
            process_list = psutil.get_process_list()
        except Exception as e:
            xlog.exception(
                'psutil.get_windows_running_process_list() failed: %r', e)

    return process_list
def req_config_handler(self):
    """Serve the /config web endpoint: dump or apply user settings."""
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''
    try:
        if reqs['cmd'] == ['get_config']:
            data = json.dumps(user_config.user_special,
                              default=lambda o: o.__dict__)
        elif reqs['cmd'] == ['set_config']:
            # string-valued form fields map straight onto attributes
            for field in ('appid', 'password', 'proxy_enable', 'proxy_type',
                          'proxy_host', 'proxy_port', 'proxy_user',
                          'proxy_passwd', 'host_appengine_mode'):
                setattr(user_config.user_special, field,
                        self.postvars[field][0])
            # integer-valued fields
            for field in ('ip_connect_interval', 'use_ipv6',
                          'connect_interval'):
                setattr(user_config.user_special, field,
                        int(self.postvars[field][0]))

            user_config.save()

            # reload everything that depends on the saved config
            config.load()
            appid_manager.reset_appid()
            import connect_manager
            connect_manager.load_sock()
            connect_manager.https_manager.load_config()
            connect_manager.forwork_manager.load_config()

            google_ip.load_config()
            check_ip.load_sock()

            data = '{"res":"success"}'
            self.send_response('text/html', data)
            #http_request("http://127.0.0.1:8085/init_module?module=gae_proxy&cmd=restart")
            return
    except Exception as e:
        xlog.exception("req_config_handler except:%s", e)
        data = '{"res":"fail", "except":"%s"}' % e
    self.send_response('text/html', data)
def check(self, callback=None, check_ca=True, close_ssl=True):
    # Establish an SSL connection to self.ip, optionally verify the
    # certificate was issued by Google (raising HoneypotError otherwise),
    # then run callback on the live socket.  Returns the callback result
    # (or True without a callback) on success, False on failure.
    ssl_sock = None
    try:
        ssl_sock,self.result.connct_time,self.result.handshake_time = connect_ssl(self.ip, timeout=self.timeout, openssl_context=self.openssl_context)

        # verify SSL certificate issuer.
        def check_ssl_cert(ssl_sock):
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #raise HoneypotError(' certficate is none')
                raise SSLError("no cert")

            issuer_commonname = next((v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            if self.check_cert and not issuer_commonname.startswith('Google'):
                # a non-Google issuer on a Google ip suggests a honeypot
                raise HoneypotError(' certficate is issued by %r, not Google' % ( issuer_commonname))

            ssl_cert = cert_util.SSLCert(cert)
            xlog.info("%s CN:%s", self.ip, ssl_cert.cn)
            self.result.domain = ssl_cert.cn

        if check_ca:
            check_ssl_cert(ssl_sock)

        if callback:
            return callback(ssl_sock, self.ip)

        return True
    except HoneypotError as e:
        xlog.warn("honeypot %s", self.ip)
        # propagate so callers can blacklist the ip
        raise e
    except SSLError as e:
        xlog.debug("Check_appengine %s SSLError:%s", self.ip, e)
        pass
    except IOError as e:
        xlog.warn("Check %s IOError:%s", self.ip, e)
        pass
    except httplib.BadStatusLine:
        #logging.debug('Check_appengine http.bad status line ip:%s', ip)
        #import traceback
        #traceback.print_exc()
        pass
    except Exception as e:
        # fall back to args[0]/message for the errno-style detail
        if len(e.args)>0:
            errno_str = e.args[0]
        else:
            errno_str = e.message
        xlog.exception('check_appengine %s %s err:%s', self.ip, errno_str, e)
    finally:
        if ssl_sock and close_ssl:
            ssl_sock.close()

    return False
def handle_one_request(self):
    """Read one request line and dispatch it to the matching do_* handler."""
    try:
        try:
            self.raw_requestline = self.rfile.readline(65537)
        except Exception as e:
            #xlog.warn("simple server handle except %r", e)
            return

        if len(self.raw_requestline) > 65536:
            xlog.warn("recv command line too large")
            return
        if not self.raw_requestline:
            #xlog.warn("closed")
            return

        self.parse_request()

        # verb -> handler dispatch table
        dispatch = {
            "GET": self.do_GET,
            "POST": self.do_POST,
            "CONNECT": self.do_CONNECT,
            "HEAD": self.do_HEAD,
            "DELETE": self.do_DELETE,
            "OPTIONS": self.do_OPTIONS,
            "PUT": self.do_PUT,
        }
        handler = dispatch.get(self.command)
        if handler is None:
            xlog.warn("unhandler cmd:%s", self.command)
            return
        handler()

        self.wfile.flush()  #actually send the response if not already done.
        self.close_connection = 0
    except socket.error as e:
        xlog.warn("socket error:%r", e)
    except IOError as e:
        if e.errno == errno.EPIPE:
            xlog.warn("PIPE error:%r", e)
        else:
            xlog.warn("IOError:%r", e)
    #except OpenSSL.SSL.SysCallError as e:
    #    xlog.warn("socket error:%r", e)
    except Exception as e:
        xlog.exception("handler:%r", e)
def handle_one_request(self):
    """Parse a single incoming request and route it to its verb handler."""
    try:
        try:
            self.raw_requestline = self.rfile.readline(65537)
        except Exception as e:
            #xlog.warn("simple server handle except %r", e)
            return

        if len(self.raw_requestline) > 65536:
            xlog.warn("recv command line too large")
            return
        if not self.raw_requestline:
            #xlog.warn("closed")
            return

        self.parse_request()

        known_verbs = ("GET", "POST", "CONNECT", "HEAD",
                       "DELETE", "OPTIONS", "PUT")
        if self.command not in known_verbs:
            xlog.warn("unhandler cmd:%s", self.command)
            return
        # route to do_GET / do_POST / ... by name
        getattr(self, "do_" + self.command)()

        self.wfile.flush()  #actually send the response if not already done.
        self.close_connection = 0
    except socket.error as e:
        xlog.warn("socket error:%r", e)
    except IOError as e:
        if e.errno == errno.EPIPE:
            xlog.warn("PIPE error:%r", e)
        else:
            xlog.warn("IOError:%r", e)
    #except OpenSSL.SSL.SysCallError as e:
    #    xlog.warn("socket error:%r", e)
    except Exception as e:
        xlog.exception("handler:%r", e)
def xxnet_version():
    """Extract the XX-Net version from the codeload link in README.md.

    Returns the 'x.y.z' string on success, 'get_version_fail' when the file
    cannot be read or contains no matching link.
    """
    readme_file = os.path.join(root_path, "README.md")
    try:
        import re
        # BUG FIX: the file handle used to be opened and never closed; use a
        # context manager so it is released even on a parse error.
        with open(readme_file, "r") as fd:
            lines = fd.readlines()

        p = re.compile(r'https://codeload.github.com/XX-net/XX-Net/zip/([0-9]+)\.([0-9]+)\.([0-9]+)')
        #zip/([0-9]+).([0-9]+).([0-9]+)
        for line in lines:
            m = p.match(line)
            if m:
                version = m.group(1) + "." + m.group(2) + "." + m.group(3)
                return version
    except Exception as e:
        xlog.exception("xxnet_version fail")
    return "get_version_fail"
def _request(self, method, host, path="/", headers={}, data="", timeout=40):
    """Perform one request via the dispatcher.

    Returns (content, status, response); ("", 500, {}) on any failure.
    """
    try:
        resp = self.http_dispatcher.request(method, host, path,
                                            dict(headers), data,
                                            timeout=timeout)
        # dispatcher may hand back nothing at all
        if not resp:
            return "", 500, {}

        code = resp.status
        if code != 200:
            xlog.warn("front request %s %s%s fail, status:%d",
                      method, host, path, code)

        body = resp.task.read_all()
        # xlog.debug("%s %s%s trace:%s", method, resp.ssl_sock.host, path, resp.task.get_trace())
        return body, code, resp
    except Exception as e:
        xlog.exception("front request %s %s%s fail:%r", method, host, path, e)
        return "", 500, {}
def add_ip(self, ip_str, handshake_time, domain=None, server="", fail_times=0):
    """Register a new ip or refresh the stats of a known one.

    Returns True only when a brand-new entry was created.
    """
    if not isinstance(ip_str, basestring):
        xlog.error("add_ip input")
        return

    # ipv6 mode only accepts ipv6 literals
    if config.USE_IPV6 and ":" not in ip_str:
        xlog.warn("add %s but ipv6", ip_str)
        return

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        existing = self.ip_dict.get(ip_str)
        if existing is not None:
            # known ip: just refresh its stats
            existing["handshake_time"] = handshake_time
            existing["fail_times"] = fail_times
            existing["fail_time"] = 0
            self.append_ip_history(ip_str, handshake_time)
            return False

        self.iplist_need_save = 1
        self.good_ip_num += 1

        now = time.time()
        self.ip_dict[ip_str] = {
            "handshake_time": handshake_time,
            "fail_times": fail_times,
            "transfered_data": 0,
            "data_active": 0,
            "domain": domain,
            "server": server,
            "history": [[now, handshake_time]],
            "fail_time": 0,
            "success_time": 0,
            "get_time": 0,
            "links": 0,
        }

        if "gws" in server:
            self.gws_ip_list.append(ip_str)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()

    return False
def report_connect_fail(self, ip_str, force_remove=False):
    # Penalize ip_str after a failed connect; drop it after 5 timeouts
    # (or immediately when force_remove is set).

    # ignore if system network is disconnected.
    if not force_remove:
        if not check_ip.network_is_ok():
            xlog.debug("report_connect_fail network fail")
            return

    self.ip_lock.acquire()
    try:
        if not ip_str in self.ip_dict:
            return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time.time() - fail_time < 1:
            # debounce failures reported within one second
            xlog.debug("fail time too near")
            return

        # increase handshake_time to make it can be used in lower probability
        self.ip_dict[ip_str]['handshake_time'] += 300
        self.ip_dict[ip_str]['timeout'] += 1
        self.ip_dict[ip_str]['history'].append([time.time(), "fail"])
        self.ip_dict[ip_str]["fail_time"] = time.time()

        if force_remove or self.ip_dict[ip_str]['timeout'] >= 5:
            property = self.ip_dict[ip_str]
            server = property['server']
            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            xlog.info("remove ip:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))

            if not force_remove:
                # queue for later re-test by the background remover thread
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    # lock released before this potentially slow call
    if not self.is_ip_enough():
        self.search_more_google_ip()
def do_CONNECT_FWD(self):
    """socket forward for http CONNECT command"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    xlog.info('FWD %s %s:%d ', self.command, host, port)
    if host == "appengine.google.com" or host == "www.google.com":
        connected_in_s = 5  # gae_proxy upload to appengine is slow, it need more 'fresh' connection.
    else:
        connected_in_s = 10  # gws connect can be used after tcp connection created 15 s

    try:
        # acknowledge the CONNECT, then grab any bytes the client
        # pipelined right behind it
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
        data = self.connection.recv(1024)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        return

    remote = forwork_manager.create_connection(host=host, port=port, sock_life=connected_in_s)
    if remote is None:
        self.connection.close()
        xlog.warn('FWD %s %s:%d create_connection fail', self.command, host, port)
        return

    try:
        if data:
            # replay the pipelined client bytes to the remote end
            remote.send(data)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        remote.close()
        return

    # reset timeout default to avoid long http upload failure, but it will delay timeout retry :(
    remote.settimeout(None)

    # blocking bidirectional copy until either side closes
    forwork_manager.forward_socket(self.connection, remote, bufsize=self.bufsize)
    xlog.debug('FWD %s %s:%d with closed', self.command, host, port)
def get_windows_running_process_list():
    """Enumerate running processes as Process(pid, name, exe) tuples.

    Windows uses the psapi/kernel32 APIs, Linux walks /proc, everything
    else falls back to psutil.
    """
    import os
    import glob
    import ctypes
    import collections

    Process = collections.namedtuple('Process', 'pid name exe')
    process_list = []

    if os.name == 'nt':
        PROCESS_QUERY_INFORMATION = 0x0400
        PROCESS_VM_READ = 0x0010

        lpidProcess = (ctypes.c_ulong * 1024)()
        cb = ctypes.sizeof(lpidProcess)
        cbNeeded = ctypes.c_ulong()
        ctypes.windll.psapi.EnumProcesses(ctypes.byref(lpidProcess), cb, ctypes.byref(cbNeeded))
        # BUG FIX: floor division keeps the slice bound an int on Python 3;
        # true division would produce a float and break the slice.
        nReturned = cbNeeded.value // ctypes.sizeof(ctypes.c_ulong())
        pidProcess = [i for i in lpidProcess][:nReturned]
        has_queryimage = hasattr(ctypes.windll.kernel32, 'QueryFullProcessImageNameA')

        for pid in pidProcess:
            hProcess = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, 0, pid)
            if hProcess:
                modname = ctypes.create_string_buffer(2048)
                count = ctypes.c_ulong(ctypes.sizeof(modname))
                if has_queryimage:
                    ctypes.windll.kernel32.QueryFullProcessImageNameA(hProcess, 0, ctypes.byref(modname), ctypes.byref(count))
                else:
                    ctypes.windll.psapi.GetModuleFileNameExA(hProcess, 0, ctypes.byref(modname), ctypes.byref(count))
                exe = modname.value
                name = os.path.basename(exe)
                process_list.append(Process(pid=pid, name=name, exe=exe))
                ctypes.windll.kernel32.CloseHandle(hProcess)
    elif sys.platform.startswith('linux'):
        for filename in glob.glob('/proc/[0-9]*/cmdline'):
            pid = int(filename.split('/')[2])
            exe_link = '/proc/%d/exe' % pid
            if os.path.exists(exe_link):
                exe = os.readlink(exe_link)
                name = os.path.basename(exe)
                process_list.append(Process(pid=pid, name=name, exe=exe))
    else:
        try:
            import psutil
            # NOTE(review): psutil.get_process_list() is removed in modern
            # psutil; psutil.process_iter() is the replacement.
            process_list = psutil.get_process_list()
        except Exception as e:
            xlog.exception('psutil.get_windows_running_process_list() failed: %r', e)

    return process_list
def xxnet_version():
    """Parse README.md for the codeload download link and return its version.

    Returns the 'x.y.z' string, or 'get_version_fail' when the file cannot
    be read or contains no matching link.
    """
    readme_file = os.path.join(root_path, "README.md")
    try:
        # BUG FIX: the original left the file handle open forever; a context
        # manager closes it on every path.
        with open(readme_file, "r") as fd:
            lines = fd.readlines()

        import re
        p = re.compile(
            r'https://codeload.github.com/XX-net/XX-Net/zip/([0-9]+)\.([0-9]+)\.([0-9]+)'
        )
        #zip/([0-9]+).([0-9]+).([0-9]+)
        for line in lines:
            m = p.match(line)
            if m:
                version = m.group(1) + "." + m.group(2) + "." + m.group(3)
                return version
    except Exception as e:
        xlog.exception("xxnet_version fail")
    return "get_version_fail"
def report_connect_fail(self, ip_str, force_remove=False):
    # Penalize ip_str after a failed connect (+200ms handshake penalty);
    # drop it after 50 timeouts or when force_remove is set.

    # ignore if system network is disconnected.
    if not force_remove:
        if not check_ip.network_is_ok():
            xlog.debug("report_connect_fail network fail")
            return

    self.ip_lock.acquire()
    try:
        if not ip_str in self.ip_dict:
            return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time.time() - fail_time < 1:
            # failures within one second are counted once
            return

        # increase handshake_time to make it can be used in lower probability
        self.ip_dict[ip_str]['handshake_time'] += 200
        self.ip_dict[ip_str]['timeout'] += 1
        self.ip_dict[ip_str]['history'].append([time.time(), "fail"])
        self.ip_dict[ip_str]["fail_time"] = time.time()

        if force_remove or self.ip_dict[ip_str]['timeout'] >= 50:
            property = self.ip_dict[ip_str]
            server = property['server']
            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            xlog.info("remove ip:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))

            if not force_remove:
                # queue for re-test by the background remover thread
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    # lock released before this potentially slow call
    if not self.is_ip_enough():
        self.search_more_google_ip()
def is_traffic_quota_allow(self, ip_str):
    """Return True when ip_str still has traffic quota available.

    The transferred-byte counter decays over time at config.ip_traffic_quota
    bytes per second; unknown ips are never allowed.
    """
    self.ip_lock.acquire()
    try:
        record = self.ip_dict.get(ip_str)
        if record is not None:
            used = record['transfered_data']
            if used == 0:
                return True

            # decay the counter by the time elapsed since last activity
            idle_seconds = time.time() - record['data_active']
            used = used - idle_seconds * config.ip_traffic_quota
            if used <= 0:
                record['transfered_data'] = 0

            if used < config.ip_traffic_quota_base:
                return True
    except Exception as e:
        xlog.exception("is_traffic_quota_exceed err:%s", e)
    finally:
        self.ip_lock.release()

    return False
def load_ip(self):
    """Load the good-IP list and the bad-IP pool from disk.

    Reads the good-ip file (falling back to the bundled default), adds each
    "ip domain server handshake_time" line via ``self.add_ip``, sorts the
    pool, then loads bad IPs (one per line) into ``self.bad_ip_pool``.
    """
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file
    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            str_l = line.split(' ')
            if len(str_l) != 4:
                xlog.warning("line err: %s", line)
                continue
            ip_str = str_l[0]
            domain = str_l[1]
            server = str_l[2]
            handshake_time = int(str_l[3])

            #logging.info("load ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server)
            self.add_ip(ip_str, handshake_time, domain, server)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_ip_by_handshake_time(force=True)

    if os.path.isfile(self.bad_ip_file):
        with open(self.bad_ip_file, "r") as fd:
            for line in fd.readlines():
                try:
                    if line == "\n":
                        continue
                    ip = line.replace('\n', '')
                    if not ip_utils.check_ip_valid(ip):
                        xlog.warning("bad_ip line err: %s", line)
                        continue
                    # BUG FIX: the original did `ip = str_l[1]`, which stored
                    # only the SECOND CHARACTER of the line; store the whole
                    # validated IP string instead.
                    self.bad_ip_pool.add(ip)
                except Exception as e:
                    xlog.exception("parse bad_ip.txt err:%r", e)
def req_config_handler(self):
    """Serve the web-UI configuration API.

    ``cmd=get_config`` returns the user settings as JSON.
    ``cmd=set_config`` stores the posted settings, saves them to disk and
    reloads every module that depends on the configuration so the change
    takes effect without a restart.
    """
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ""

    try:
        if reqs["cmd"] == ["get_config"]:
            data = json.dumps(user_config.user_special, default=lambda o: o.__dict__)
        elif reqs["cmd"] == ["set_config"]:
            # Each posted field arrives as a single-element list.
            user_config.user_special.appid = self.postvars["appid"][0]
            user_config.user_special.password = self.postvars["password"][0]
            user_config.user_special.proxy_enable = self.postvars["proxy_enable"][0]
            user_config.user_special.proxy_type = self.postvars["proxy_type"][0]
            user_config.user_special.proxy_host = self.postvars["proxy_host"][0]
            user_config.user_special.proxy_port = self.postvars["proxy_port"][0]
            user_config.user_special.proxy_user = self.postvars["proxy_user"][0]
            user_config.user_special.proxy_passwd = self.postvars["proxy_passwd"][0]
            user_config.user_special.host_appengine_mode = self.postvars["host_appengine_mode"][0]
            user_config.user_special.ip_connect_interval = int(self.postvars["ip_connect_interval"][0])
            user_config.user_special.use_ipv6 = int(self.postvars["use_ipv6"][0])
            user_config.user_special.connect_interval = int(self.postvars["connect_interval"][0])
            user_config.save()

            # Reload config and re-initialise all dependent modules.
            config.load()
            appid_manager.reset_appid()
            import connect_manager
            connect_manager.load_proxy_config()
            connect_manager.https_manager.load_config()
            connect_manager.forwork_manager.load_config()
            google_ip.reset()
            check_ip.load_proxy_config()

            data = '{"res":"success"}'
            self.send_response("text/html", data)
            # http_request("http://127.0.0.1:8085/init_module?module=gae_proxy&cmd=restart")
            return
    except Exception as e:
        xlog.exception("req_config_handler except:%s", e)
        data = '{"res":"fail", "except":"%s"}' % e
    self.send_response("text/html", data)
def get_ip(self):
    """Pick a random IP (as an integer) from the loaded ranges.

    Chooses a random range, then a random offset within it, skipping
    addresses whose last octet is 0 or 255 (network/broadcast addresses).
    Loops until a usable address is found.
    """
    #return self.get_real_random_ip()
    while True:
        index = random.randint(0, len(self.ip_range_list) - 1)
        ip_range = self.ip_range_list[index]
        #logging.debug("random.randint %d - %d", ip_range[0], ip_range[1])
        if ip_range[1] == ip_range[0]:
            # Degenerate single-address range: return it directly.
            return ip_range[1]

        try:
            offset = random.randint(0, ip_range[1] - ip_range[0])
        except Exception as e:
            xlog.exception("random.randint:%r %d - %d, %d", e, ip_range[0], ip_range[1], ip_range[1] - ip_range[0])
            # FIX: the original fell through and used an unbound variable
            # (NameError); retry with a fresh range instead.
            continue

        ip = ip_range[0] + offset
        # FIX: the last octet of an IPv4 integer is ip % 256; the original
        # used ip % 255, which mislabels octets and lets x.x.x.0 / x.x.x.255
        # slip through while rejecting valid addresses.
        last_byte = ip % 256
        if last_byte == 0 or last_byte == 255:
            continue
        return ip
def forward_socket(self, local, remote, timeout=60, tick=2, bufsize=8192):
    """Pump bytes between *local* and *remote* until either side closes.

    Uses select() with a *tick*-second granularity; the idle countdown is
    reset to *timeout* whenever data flows in either direction.  Both
    sockets are closed on exit, whatever the reason.
    """
    try:
        remaining = timeout
        while True:
            remaining -= tick
            if remaining <= 0:
                # Idle for the full timeout window: give up.
                break
            readable, _, broken = select.select([local, remote], [], [local, remote], tick)
            if broken:
                break
            if not readable:
                continue
            for sock in readable:
                data = sock.recv(bufsize)
                if not data:
                    # Peer closed its end of the tunnel.
                    if sock is remote:
                        xlog.debug("forward remote disconnected.")
                    else:
                        xlog.debug("forward local disconnected.")
                    return
                # Relay to the opposite socket and reset the idle countdown.
                target = local if sock is remote else remote
                target.sendall(data)
                remaining = timeout
    except Exception as e:
        # Benign disconnect errno values are expected; log everything else.
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.ENOTCONN, errno.EPIPE):
            xlog.exception("forward except:%s.", e)
    finally:
        if local:
            local.close()
        if remote:
            remote.close()
def do_CONNECT_AGENT(self):
    """deploy fake cert to client"""
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the browser the tunnel is up, then man-in-the-middle it with a
    # locally generated certificate for *host*.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake rejected; regenerate a full-domain cert for next time.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Swap the plain socket for the TLS one, keeping the originals so the
    # finally block below can shut the real connection down.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # Read and parse the first request the browser sends inside the tunnel.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)
    if self.command not in self.gae_support_methods:
        # GAE cannot relay this HTTP method; for google-family hosts, move
        # the host from the GAE list to the forward (FWD) list and redirect.
        if host.endswith(".google.com") or host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_FWD:
                fwd_set = [s for s in config.HOSTS_FWD]
                fwd_set.append(host)
                config.HOSTS_FWD = tuple(fwd_set)
            xlog.warn("Method %s not support in GAE, Redirect to FWD for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)

        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always tear down the underlying (pre-TLS) connection.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """Range-download worker: repeatedly take a (start, end, response) task
    from *range_queue*, fetch that byte range, and feed (offset, data)
    chunks into *data_queue*.  A failed range is re-queued as
    (start, end, None) so another worker can retry it."""
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers['Connection'] = 'close'
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Throttle: this range is ahead of the writer and more than
                # 30MB is already buffered — put it back and wait.
                if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers['Range'] = 'bytes=%d-%d' % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                range_queue.put((start, end, None))
                continue

            if response.app_status != 200:
                xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])
                if response.app_status == 404:
                    # Appid gone: drop it and stop all workers if none remain.
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                if response.app_status == 503:
                    # Appid over quota: bench it temporarily.
                    xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader('Location'):
                # Follow the redirect and retry the same range on the new URL.
                self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                xlog.info('RangeFetch Redirect(%r)', self.url)
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader('Content-Range')
                if not content_range:
                    xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s', self.method, self.url, content_range, response.getheaders(), start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader('Content-Length', 0))
                xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            # Empty read: abort after 20s of silence,
                            # otherwise poll again shortly.
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue
                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                        break

                if start < end + 1:
                    # Range incomplete: re-queue the remainder for retry.
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
            else:
                xlog.error('RangeFetch %r return %s', self.url, response.status)
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            xlog.exception('RangeFetch._fetchlet error:%s', e)
            raise
def handler(method, url, headers, body, wfile):
    """Fetch *url* through GAE and stream the response to the browser.

    Retries for up to 30 seconds, handling appid 404/503 removal and
    force-removing IPs on 403/405.  Hands 206 responses to RangeFetch;
    otherwise forwards status, filtered headers and body to *wfile*.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  # time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

            if response.app_status == 404:
                # Appid no longer exists: retire it and retry with another.
                xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  # Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                # Appid out of quota: retire it and retry with another.
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break
        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        # Partial content: hand off to the multi-threaded range fetcher.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            # GAE tunnels the real HEAD Content-Length in a custom header.
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away; keep draining the GAE response below so the
            # connection can still be reused.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        last_read_time = time.time()
        while True:
            if start > end:
                # Whole body delivered: recycle the SSL connection.
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info("GAE t:%d s:%d %d %s", (time.time() - time_request) * 1000, length, response.status, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # NOTE(review): a second write after WANT_WRITE/READ —
                        # presumably a retry for non-blocking SSL; confirm.
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # Python 2 style: e_b[0] is the errno of the IOError.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def load(self):
    """load config from proxy.ini"""
    current_path = os.path.dirname(os.path.abspath(__file__))
    # Relax the option regex so keys may contain dots (host names) and only
    # '=' separates key from value.
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
    self.CONFIG = ConfigParser.ConfigParser()
    self.CONFIG_FILENAME = os.path.abspath(os.path.join(current_path, 'proxy.ini'))

    self.DATA_PATH = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir, 'data', 'gae_proxy'))
    if not os.path.isdir(self.DATA_PATH):
        self.DATA_PATH = current_path

    # load ../../../data/gae_proxy/config.ini, set by web_ui
    self.CONFIG_USER_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'config.ini'))

    # Layered config: bundled proxy.ini first, then user overrides.
    self.CONFIG.read(self.CONFIG_FILENAME)
    if os.path.isfile(self.CONFIG_USER_FILENAME):
        with open(self.CONFIG_USER_FILENAME, 'rb') as fp:
            content = fp.read()
        self.CONFIG.readfp(io.BytesIO(content))

    # load ../../../data/gae_proxy/manual.ini, set by manual
    self.CONFIG_MANUAL_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'manual.ini'))
    if os.path.isfile(self.CONFIG_MANUAL_FILENAME):
        with open(self.CONFIG_MANUAL_FILENAME, 'rb') as fp:
            content = fp.read()
        try:
            self.CONFIG.readfp(io.BytesIO(content))
            xlog.info("load manual.ini success")
        except Exception as e:
            xlog.exception("data/gae_proxy/manual.ini load error:%s", e)

    self.LISTEN_IP = self.CONFIG.get('listen', 'ip')
    self.LISTEN_PORT = self.CONFIG.getint('listen', 'port')
    self.LISTEN_VISIBLE = self.CONFIG.getint('listen', 'visible')
    self.LISTEN_DEBUGINFO = self.CONFIG.getint('listen', 'debuginfo')

    # Appids may be given with or without the .appspot.com suffix.
    self.GAE_APPIDS = re.findall(r'[\w\-\.]+', self.CONFIG.get('gae', 'appid').replace('.appspot.com', ''))
    self.GAE_PASSWORD = self.CONFIG.get('gae', 'password').strip()

    # Partition the [hosts] section: keys starting with '.' are suffix
    # matches, others are exact host matches.
    fwd_endswith = []
    fwd_hosts = []
    direct_endswith = []
    direct_hosts = []
    gae_endswith = []
    gae_hosts = []
    for k, v in self.CONFIG.items('hosts'):
        if v == "fwd":
            if k.startswith('.'):
                fwd_endswith.append(k)
            else:
                fwd_hosts.append(k)
        elif v == "direct":
            if k.startswith('.'):
                direct_endswith.append(k)
            else:
                direct_hosts.append(k)
        elif v == "gae":
            if k.startswith('.'):
                gae_endswith.append(k)
            else:
                gae_hosts.append(k)
    self.HOSTS_FWD_ENDSWITH = tuple(fwd_endswith)
    self.HOSTS_FWD = tuple(fwd_hosts)
    self.HOSTS_GAE_ENDSWITH = tuple(gae_endswith)
    self.HOSTS_GAE = tuple(gae_hosts)
    self.HOSTS_DIRECT_ENDSWITH = tuple(direct_endswith)
    self.HOSTS_DIRECT = tuple(direct_hosts)

    self.AUTORANGE_MAXSIZE = self.CONFIG.getint('autorange', 'maxsize')
    self.AUTORANGE_WAITSIZE = self.CONFIG.getint('autorange', 'waitsize')
    self.AUTORANGE_BUFSIZE = self.CONFIG.getint('autorange', 'bufsize')
    self.AUTORANGE_THREADS = self.CONFIG.getint('autorange', 'threads')

    self.PAC_ENABLE = self.CONFIG.getint('pac', 'enable')
    self.PAC_IP = self.CONFIG.get('pac', 'ip')
    self.PAC_PORT = self.CONFIG.getint('pac', 'port')
    self.PAC_FILE = self.CONFIG.get('pac', 'file').lstrip('/')
    self.PAC_GFWLIST = self.CONFIG.get('pac', 'gfwlist')
    # 'adblock' is optional.
    self.PAC_ADBLOCK = self.CONFIG.get('pac', 'adblock') if self.CONFIG.has_option('pac', 'adblock') else ''
    self.PAC_EXPIRED = self.CONFIG.getint('pac', 'expired')
    self.pac_url = 'http://%s:%d/%s\n' % (self.PAC_IP, self.PAC_PORT, self.PAC_FILE)

    self.CONTROL_ENABLE = self.CONFIG.getint('control', 'enable')
    self.CONTROL_IP = self.CONFIG.get('control', 'ip')
    self.CONTROL_PORT = self.CONFIG.getint('control', 'port')

    self.PROXY_ENABLE = self.CONFIG.getint('proxy', 'enable')
    self.PROXY_TYPE = self.CONFIG.get('proxy', 'type')
    self.PROXY_HOST = self.CONFIG.get('proxy', 'host')
    self.PROXY_PORT = self.CONFIG.get('proxy', 'port')
    # Empty port string means "unset", stored as 0.
    if self.PROXY_PORT == "":
        self.PROXY_PORT = 0
    else:
        self.PROXY_PORT = int(self.PROXY_PORT)
    self.PROXY_USER = self.CONFIG.get('proxy', 'user')
    self.PROXY_PASSWD = self.CONFIG.get('proxy', 'passwd')

    self.LOVE_ENABLE = self.CONFIG.getint('love', 'enable')
    # The tip string is stored unicode-escaped, '|'-separated.
    self.LOVE_TIP = self.CONFIG.get('love', 'tip').encode('utf8').decode('unicode-escape').split('|')

    self.USE_IPV6 = self.CONFIG.getint('google_ip', 'use_ipv6')

    # NOTE(review): reads through the module-level `config` instance rather
    # than self.CONFIG — presumably equivalent since this method runs on that
    # instance; confirm.
    self.https_max_connect_thread = config.CONFIG.getint("connect_manager", "https_max_connect_thread")

    # change to True when finished import CA cert to browser
    # launcher will wait import ready then open browser to show status, check update etc
    self.cert_import_ready = False
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """Range-download worker: repeatedly take a (start, end, response) task
    from *range_queue*, fetch that byte range, and feed (offset, data)
    chunks into *data_queue*.  A failed range is re-queued as
    (start, end, None) so another worker can retry it."""
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers["Connection"] = "close"
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Throttle: this range is ahead of the writer and more than
                # 30MB is already buffered — put it back and wait.
                if (
                    self.expect_begin < start
                    and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024
                ):
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers["Range"] = "bytes=%d-%d" % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning("RangeFetch %s return %r", headers["Range"], response)
                range_queue.put((start, end, None))
                continue

            if response.app_status != 200:
                xlog.warning(
                    'Range Fetch return %s "%s %s" %s ',
                    response.app_status,
                    self.method,
                    self.url,
                    headers["Range"],
                )
                if response.app_status == 404:
                    # Appid gone: drop it and stop all workers if none remain.
                    xlog.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                if response.app_status == 503:
                    # Appid over quota: bench it temporarily.
                    xlog.warning("APPID %r out of Quota, remove it temporary.", response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader("Location"):
                # Follow the redirect and retry the same range on the new URL.
                self.url = urlparse.urljoin(self.url, response.getheader("Location"))
                xlog.info("RangeFetch Redirect(%r)", self.url)
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader("Content-Range")
                if not content_range:
                    xlog.warning(
                        'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                        self.method,
                        self.url,
                        content_range,
                        response.getheaders(),
                        start,
                        end,
                    )
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader("Content-Length", 0))
                xlog.info(
                    ">>>>>>>>>>>>>>> [thread %s] %s %s",
                    threading.currentThread().ident,
                    content_length,
                    content_range,
                )

                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            # Empty read: abort after 20s of silence,
                            # otherwise poll again shortly.
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue
                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers["Range"], e)
                        break

                if start < end + 1:
                    # Range incomplete: re-queue the remainder for retry.
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info(">>>>>>>>>>>>>>> Successfully reached %d bytes.", start - 1)
            else:
                xlog.error("RangeFetch %r return %s", self.url, response.status)
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            xlog.exception("RangeFetch._fetchlet error:%s", e)
            raise
def do_CONNECT_DIRECT(self):
    """deploy fake cert to client"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        # Only HTTPS tunnels are supported on the direct path.
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the browser the tunnel is up, then man-in-the-middle it with a
    # locally generated certificate for *host*.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake rejected; regenerate a full-domain cert for next time.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Swap the plain socket for the TLS one, keeping the originals so the
    # finally block below can shut the real connection down.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # Read and parse the first request the browser sends inside the tunnel.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        # Rebuild "path?query" for the upstream request.
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always tear down the underlying (pre-TLS) connection.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def update_pacfile(filename):
    """Download the gfwlist (and optional adblock list), convert both to PAC
    JavaScript rules, and append them to the serving PAC file template,
    writing the result to the user PAC file.  Returns early (no write) on
    any download or conversion failure."""
    opener = get_opener()

    listen_ip = config.LISTEN_IP  # NOTE(review): assigned but never used here.
    autoproxy = gae_proxy_listen
    blackhole = pac_listen
    default = 'DIRECT'

    if config.PAC_ADBLOCK:
        try:
            xlog.info('try download %r to update_pacfile(%r)', config.PAC_ADBLOCK, filename)
            adblock_content = opener.open(config.PAC_ADBLOCK).read()
        except Exception as e:
            xlog.warn("pac_update download adblock fail:%r", e)
            return

    try:
        xlog.info('try download %r to update_pacfile(%r)', config.PAC_GFWLIST, filename)
        pac_content = opener.open(config.PAC_GFWLIST).read()
    except Exception as e:
        xlog.warn("pac_update download gfwlist fail:%r", e)
        return

    content = ''
    need_update = True

    # Start from the currently served PAC file, truncated at the
    # auto-generated-rules placeholder, with proxy addresses rewritten.
    with open(get_serving_pacfile(), 'rb') as fp:
        content = fp.read()

    try:
        placeholder = '// AUTO-GENERATED RULES, DO NOT MODIFY!'
        content = content[:content.index(placeholder) + len(placeholder)]
        content = re.sub(r'''blackhole\s*=\s*['"]PROXY [\.\w:]+['"]''', 'blackhole = \'PROXY %s\'' % blackhole, content)
        content = re.sub(r'''autoproxy\s*=\s*['"]PROXY [\.\w:]+['"]''', 'autoproxy = \'PROXY %s\'' % autoproxy, content)
        if content.startswith('//'):
            line = '// Proxy Auto-Config file generated by autoproxy2pac, %s\r\n' % time.strftime('%Y-%m-%d %H:%M:%S')
            content = line + '\r\n'.join(content.splitlines()[1:])
    except ValueError:
        # Placeholder missing: template unusable, do not write the result.
        need_update = False

    try:
        if config.PAC_ADBLOCK:
            xlog.info('%r downloaded, try convert it with adblock2pac', config.PAC_ADBLOCK)
            jsrule = PacUtil.adblock2pac(adblock_content, 'FindProxyForURLByAdblock', blackhole, default)
            content += '\r\n' + jsrule + '\r\n'
            xlog.info('%r downloaded and parsed', config.PAC_ADBLOCK)
        else:
            # No adblock list configured: emit a pass-through stub.
            content += '\r\nfunction FindProxyForURLByAdblock(url, host) {return "DIRECT";}\r\n'
    except Exception as e:
        need_update = False
        xlog.exception('update_pacfile failed: %r', e)
        return

    try:
        # gfwlist is base64-encoded autoproxy rules.
        autoproxy_content = base64.b64decode(pac_content)
        xlog.info('%r downloaded, try convert it with autoproxy2pac', config.PAC_GFWLIST)
        jsrule = PacUtil.autoproxy2pac(autoproxy_content, 'FindProxyForURLByAutoProxy', autoproxy, default)
        content += '\r\n' + jsrule + '\r\n'
        xlog.info('%r downloaded and parsed', config.PAC_GFWLIST)
    except Exception as e:
        need_update = False
        xlog.exception('update_pacfile failed: %r', e)
        return

    if need_update:
        with open(user_pacfile, 'wb') as fp:
            fp.write(content)
        xlog.info('%r successfully updated', user_pacfile)
        # NOTE(review): this rebinds a LOCAL name; without a
        # `global serving_pacfile` declaration (not visible here) it has no
        # effect outside this function — confirm against the module.
        serving_pacfile = user_pacfile
def handler(method, host, url, headers, body, wfile):
    """Fetch *url* from *host* over a direct connection and stream the
    response to the browser via *wfile*.

    Retries the fetch for up to 30 seconds.  Supports HEAD/204/304
    short-circuits, chunked (Transfer-Encoding) bodies, and Content-Range
    responses.  The SSL connection is recycled on clean completion.

    FIX: the source text contained a garbled duplicate of the read-loop
    tail (copy/paste artifact) — reconstructed once; also modernised the
    py2 comma-form `except httplib.IncompleteRead, e` to `as e` for
    consistency with the rest of the file (valid on py2.6+).
    """
    time_request = time.time()

    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            return return_fail_message(wfile)

        try:
            response = fetch(method, host, url, headers, body)
            if response:
                break
        except OpenSSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)

    try:
        send_to_browser = True
        try:
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away; keep draining the response so the upstream
            # connection can still be reused.
            send_to_browser = False
            wait_time = time.time() - time_request
            xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s%s", wait_time, e, host, url)

        if method == 'HEAD' or response.status in (204, 304):
            # No body expected: recycle the connection and finish.
            xlog.info("DIRECT t:%d %d %s %s", (time.time() - time_request) * 1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return

        if 'Transfer-Encoding' in response_headers:
            # Chunked body: re-chunk it to the browser.
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead as e:
                    data = e.partial
                if send_to_browser:
                    try:
                        if not data:
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time() - time_request, e, host, url)
                else:
                    if not data:
                        break
            response.close()
            xlog.info("DIRECT chucked t:%d s:%d %d %s %s", (time.time() - time_request) * 1000, length, response.status, host, url)
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        time_last_read = time.time()
        while True:
            if start > end:
                # Whole body delivered: recycle the SSL connection.
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
                xlog.info("DIRECT t:%d s:%d %d %s %s", (time.time() - time_request) * 1000, length, response.status, host, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - time_last_read > 20:
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s %s", (time.time() - time_request) * 1000, length, (end - start), host, url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            time_last_read = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # Python 2 style: e_b[0] is the errno of the IOError.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('direct_handler send to browser return %r %s %r', e_b, host, url)
                    else:
                        xlog.warn('direct_handler send to browser return %r %s %r', e_b, host, url)
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.exception("direct_handler err:%r %s %s time:%d", e, host, url, time_cost)
        else:
            xlog.exception("direct_handler except:%r %s %s", e, host, url)
    except Exception as e:
        xlog.exception("direct_handler except:%r %s %s", e, host, url)
def load(self):
    """Load configuration from proxy.ini, then overlay user/manual overrides.

    Read order (later files override earlier ones):
      1. proxy.ini next to this module (shipped defaults)
      2. data/gae_proxy/config.ini, written by the web UI, if present
      3. data/gae_proxy/manual.ini, hand-edited overrides, if present

    All resulting settings are stored as attributes on self.
    """
    current_path = os.path.dirname(os.path.abspath(__file__))

    # Relax the ConfigParser option pattern so that only '=' (not ':')
    # separates keys from values; host entries may contain ':' otherwise.
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
    self.CONFIG = ConfigParser.ConfigParser()
    self.CONFIG_FILENAME = os.path.abspath(os.path.join(current_path, 'proxy.ini'))

    self.DATA_PATH = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir, 'data', 'gae_proxy'))
    if not os.path.isdir(self.DATA_PATH):
        # Data directory missing (e.g. portable layout): fall back to the
        # module directory so the user/manual ini lookups still work.
        self.DATA_PATH = current_path

    # load ../../../data/gae_proxy/config.ini, set by web_ui
    self.CONFIG_USER_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'config.ini'))

    self.CONFIG.read(self.CONFIG_FILENAME)
    if os.path.isfile(self.CONFIG_USER_FILENAME):
        with open(self.CONFIG_USER_FILENAME, 'rb') as fp:
            content = fp.read()
        self.CONFIG.readfp(io.BytesIO(content))

    # load ../../../data/gae_proxy/manual.ini, set by manual
    self.CONFIG_MANUAL_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'manual.ini'))
    if os.path.isfile(self.CONFIG_MANUAL_FILENAME):
        with open(self.CONFIG_MANUAL_FILENAME, 'rb') as fp:
            content = fp.read()
        # manual.ini is hand-edited, so parse errors are tolerated and logged.
        try:
            self.CONFIG.readfp(io.BytesIO(content))
            xlog.info("load manual.ini success")
        except Exception as e:
            xlog.exception("data/gae_proxy/manual.ini load error:%s", e)

    self.LISTEN_IP = self.CONFIG.get('listen', 'ip')
    self.LISTEN_PORT = self.CONFIG.getint('listen', 'port')
    self.LISTEN_VISIBLE = self.CONFIG.getint('listen', 'visible')
    self.LISTEN_DEBUGINFO = self.CONFIG.getint('listen', 'debuginfo')

    # Appids may be listed with or without the ".appspot.com" suffix.
    self.GAE_APPIDS = re.findall(r'[\w\-\.]+', self.CONFIG.get('gae', 'appid').replace('.appspot.com', ''))
    self.GAE_PASSWORD = self.CONFIG.get('gae', 'password').strip()

    # [hosts] maps a host name (or a ".suffix" match-by-ending pattern) to a
    # dispatch mode: "fwd" (raw forward), "direct", or "gae".
    fwd_endswith = []
    fwd_hosts = []
    direct_endswith = []
    direct_hosts = []
    gae_endswith = []
    gae_hosts = []
    for k, v in self.CONFIG.items('hosts'):
        if v == "fwd":
            if k.startswith('.'):
                fwd_endswith.append(k)
            else:
                fwd_hosts.append(k)
        elif v == "direct":
            if k.startswith('.'):
                direct_endswith.append(k)
            else:
                direct_hosts.append(k)
        elif v == "gae":
            if k.startswith('.'):
                gae_endswith.append(k)
            else:
                gae_hosts.append(k)
    # Stored as tuples so they can be passed straight to str.endswith().
    self.HOSTS_FWD_ENDSWITH = tuple(fwd_endswith)
    self.HOSTS_FWD = tuple(fwd_hosts)
    self.HOSTS_GAE_ENDSWITH = tuple(gae_endswith)
    self.HOSTS_GAE = tuple(gae_hosts)
    self.HOSTS_DIRECT_ENDSWITH = tuple(direct_endswith)
    self.HOSTS_DIRECT = tuple(direct_hosts)

    self.AUTORANGE_MAXSIZE = self.CONFIG.getint('autorange', 'maxsize')
    self.AUTORANGE_WAITSIZE = self.CONFIG.getint('autorange', 'waitsize')
    self.AUTORANGE_BUFSIZE = self.CONFIG.getint('autorange', 'bufsize')
    self.AUTORANGE_THREADS = self.CONFIG.getint('autorange', 'threads')

    self.PAC_ENABLE = self.CONFIG.getint('pac', 'enable')
    self.PAC_IP = self.CONFIG.get('pac', 'ip')
    self.PAC_PORT = self.CONFIG.getint('pac', 'port')
    self.PAC_FILE = self.CONFIG.get('pac', 'file').lstrip('/')
    self.PAC_GFWLIST = self.CONFIG.get('pac', 'gfwlist')
    # 'adblock' is optional: older proxy.ini files may not define it.
    self.PAC_ADBLOCK = self.CONFIG.get('pac', 'adblock') if self.CONFIG.has_option('pac', 'adblock') else ''
    self.PAC_EXPIRED = self.CONFIG.getint('pac', 'expired')
    self.pac_url = 'http://%s:%d/%s\n' % (self.PAC_IP, self.PAC_PORT, self.PAC_FILE)

    self.CONTROL_ENABLE = self.CONFIG.getint('control', 'enable')
    self.CONTROL_IP = self.CONFIG.get('control', 'ip')
    self.CONTROL_PORT = self.CONFIG.getint('control', 'port')

    self.PROXY_ENABLE = self.CONFIG.getint('proxy', 'enable')
    self.PROXY_TYPE = self.CONFIG.get('proxy', 'type')
    self.PROXY_HOST = self.CONFIG.get('proxy', 'host')
    self.PROXY_PORT = self.CONFIG.get('proxy', 'port')
    # Port may legitimately be empty when no upstream proxy is configured;
    # normalize to int so downstream code can compare/connect uniformly.
    if self.PROXY_PORT == "":
        self.PROXY_PORT = 0
    else:
        self.PROXY_PORT = int(self.PROXY_PORT)
    self.PROXY_USER = self.CONFIG.get('proxy', 'user')
    self.PROXY_PASSWD = self.CONFIG.get('proxy', 'passwd')

    self.LOVE_ENABLE = self.CONFIG.getint('love', 'enable')
    # Tips are stored escape-encoded in the ini; decode and split on '|'.
    self.LOVE_TIP = self.CONFIG.get('love', 'tip').encode('utf8').decode('unicode-escape').split('|')

    self.USE_IPV6 = self.CONFIG.getint('google_ip', 'use_ipv6')

    # Fix: was config.CONFIG.getint(...) via the module-level singleton,
    # which fails if load() runs before the global 'config' is bound and is
    # inconsistent with every other option read in this method.
    self.https_max_connect_thread = self.CONFIG.getint("connect_manager", "https_max_connect_thread")

    # change to True when finished import CA cert to browser
    # launcher will wait import ready then open browser to show status, check update etc
    self.cert_import_ready = False
return else: time.sleep(0.1) continue time_last_read = time.time() data_len = len(data) start += data_len if send_to_browser: try: ret = wfile.write(data) if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ: xlog.debug("send to browser wfile.write ret:%d", ret) ret = wfile.write(data) except Exception as e_b: if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or "bad write retry" in repr(e_b): xlog.warn("direct_handler send to browser return %r %s %r", e_b, host, url) else: xlog.warn("direct_handler send to browser return %r %s %r", e_b, host, url) send_to_browser = False except NetWorkIOError as e: time_except = time.time() time_cost = time_except - time_request if e[0] in (errno.ECONNABORTED, errno.EPIPE) or "bad write retry" in repr(e): xlog.exception("direct_handler err:%r %s %s time:%d", e, host, url, time_cost) else: xlog.exception("direct_handler except:%r %s %s", e, host, url) except Exception as e: xlog.exception("direct_handler except:%r %s %s", e, host, url)
def do_CONNECT_DIRECT(self):
    """deploy fake cert to client

    Handle a browser CONNECT tunnel by impersonating the target host with a
    locally generated certificate, reading the inner HTTPS request, and
    relaying it through direct_handler (no GAE involved).
    """
    host, _, port = self.path.rpartition(':')
    port = int(port)
    # Only standard HTTPS tunnels are supported on the direct path.
    if port != 443:
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the client the tunnel is up before starting the TLS handshake.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        # Wrap the client connection with our fake cert for this host.
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake rejected: generate a full-domain cert and give up on this
        # attempt — presumably the client's retry then succeeds (TODO confirm).
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        # Client abort/reset during handshake is routine; log anything else.
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception(
                'ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Swap in the TLS-wrapped socket, keeping the raw one in __real* so the
    # finally block below can shut it down. NOTE(review): __realwfile and
    # __realrfile are saved but never restored/closed here.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # Read the inner (decrypted) request line; 65537 lets us detect >64KB.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    # Rebuild an absolute URL from the tunneled origin-form request.
    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)
    xlog.debug('GAE CONNECT %s %s', self.command, self.path)

    try:
        # NOTE(review): this branch looks dead — self.path was already
        # rewritten to 'https://...' above under the same condition.
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        # parsed_url[2] is the path, parsed_url[4] the query string.
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        # Title-case header names for consistent lookups below.
        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        # Relay the request/response directly (wfile is the TLS socket).
        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always tear down the raw client socket underneath the TLS wrapper.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def do_CONNECT_AGENT(self):
    """deploy fake cert to client"""
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the client the tunnel is up before starting the TLS handshake.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        # Wrap the client connection with our fake cert for this host.
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake rejected: generate a full-domain cert and give up on this
        # attempt — presumably the client's retry then succeeds (TODO confirm).
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        # Client abort/reset during handshake is routine; log anything else.
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception(
                'ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Swap in the TLS-wrapped socket, keeping the raw one in __real* so the
    # finally block below can shut it down.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # Read the inner (decrypted) request line; 65537 lets us detect >64KB.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            xlog.warn("read request line len:%d", len(self.raw_requestline))
            return
        if not self.raw_requestline:
            xlog.warn("read request line empty")
            return
        if not self.parse_request():
            xlog.warn("parse request fail:%s", self.raw_requestline)
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            # NOTE(review): message text appears copy-pasted from the
            # wrap_socket handler above; it actually reports a read failure.
            xlog.exception(
                'ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            raise

    # Rebuild an absolute URL from the tunneled origin-form request.
    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)
    xlog.debug('GAE CONNECT %s %s', self.command, self.path)

    if self.command not in self.gae_support_methods:
        if host.endswith(".google.com") or host.endswith(
                config.HOSTS_FWD_ENDSWITH) or host.endswith(
                config.HOSTS_GAE_ENDSWITH):
            # Method unusable via GAE: move this host from the GAE list to
            # the FWD list and 301-redirect so the client's retry is
            # dispatched over the forward path instead.
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_FWD:
                fwd_set = [s for s in config.HOSTS_FWD]
                fwd_set.append(host)
                config.HOSTS_FWD = tuple(fwd_set)
            xlog.warn(
                "Method %s not support in GAE, Redirect to FWD for %s", self.command, self.path)
            return self.wfile.write(
                ('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            # Host not eligible for forwarding: refuse the request.
            xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
            return self.wfile.write(
                ('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc
        self.parsed_url = urlparse.urlparse(self.path)
        # Hand the decrypted request over to the GAE agent path.
        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always tear down the raw client socket underneath the TLS wrapper.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def handler(method, host, url, headers, body, wfile): time_request = time.time() errors = [] response = None while True: if time.time() - time_request > 30: return return_fail_message(wfile) try: response = fetch(method, host, url, headers, body) if response: break except OpenSSL.SysCallError as e: errors.append(e) xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url) except Exception as e: errors.append(e) xlog.exception("direct_handler.handler %r %s %s , retry...", e, host, url) try: send_to_browser = True try: response_headers = dict((k.title(), v) for k, v in response.getheaders()) wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason)) for key, value in response.getheaders(): send_header(wfile, key, value) wfile.write("\r\n") except Exception as e: send_to_browser = False wait_time = time.time() - time_request xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s%s", wait_time, e, host, url) if method == "HEAD" or response.status in (204, 304): xlog.info("DIRECT t:%d %d %s %s", (time.time() - time_request) * 1000, response.status, host, url) https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host) response.close() return if "Transfer-Encoding" in response_headers: length = 0 while True: try: data = response.read(8192) except httplib.IncompleteRead, e: data = e.partial if send_to_browser: try: if not data: wfile.write("0\r\n\r\n") break length += len(data) wfile.write("%x\r\n" % len(data)) wfile.write(data) wfile.write("\r\n") except Exception as e: send_to_browser = False xlog.warn( "direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time() - time_request, e, host, url, ) else: if not data: break response.close() xlog.info( "DIRECT chucked t:%d s:%d %d %s %s", (time.time() - time_request) * 1000, length, response.status, host, url, ) return content_length = int(response.getheader("Content-Length", 0)) content_range = response.getheader("Content-Range", "") if content_range: start, end, length = 
tuple( int(x) for x in re.search(r"bytes (\d+)-(\d+)/(\d+)", content_range).group(1, 2, 3) ) else: start, end, length = 0, content_length - 1, content_length time_last_read = time.time() while True: if start > end: https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host) xlog.info( "DIRECT t:%d s:%d %d %s %s", (time.time() - time_request) * 1000, length, response.status, host, url ) return data = response.read(config.AUTORANGE_BUFSIZE) if not data: if time.time() - time_last_read > 20: response.close() xlog.warn( "read timeout t:%d len:%d left:%d %s %s", (time.time() - time_request) * 1000, length, (end - start), host, url, ) return else: time.sleep(0.1) continue time_last_read = time.time() data_len = len(data) start += data_len if send_to_browser: try: ret = wfile.write(data) if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ: xlog.debug("send to browser wfile.write ret:%d", ret) ret = wfile.write(data) except Exception as e_b: if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or "bad write retry" in repr(e_b): xlog.warn("direct_handler send to browser return %r %s %r", e_b, host, url) else: xlog.warn("direct_handler send to browser return %r %s %r", e_b, host, url) send_to_browser = False