def fetch(method, host, path, headers, payload, bufsize=8192):
    """Send one HTTP request over a pooled SSL connection and return the
    httplib.HTTPResponse (headers parsed, body not yet read), or None on
    any failure.

    The connection is obtained from https_manager; on failure the ip is
    reported closed so the pool can drop it.
    NOTE(review): `bufsize` is accepted but never used in this body.
    """
    request_data = '%s %s HTTP/1.1\r\n' % (method, path)
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'

    ssl_sock = https_manager.get_ssl_connection(host)
    if not ssl_sock:
        # No usable connection available; caller treats None as failure.
        return

    try:
        ssl_sock.send(request_data.encode())

        # send() may accept fewer bytes than asked; loop until the whole
        # payload is written, in chunks of at most 64KB-1.
        payload_len = len(payload)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = ssl_sock.send(payload[start:start+send_size])
            start += sended

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        # Keep a reference so the caller can return the socket to the pool
        # after consuming the body.
        response.ssl_sock = ssl_sock

        # Allow up to 90s for the response headers, then restore the
        # connection's original timeout.
        orig_timeout = ssl_sock.gettimeout()
        ssl_sock.settimeout(90)
        response.begin()
        ssl_sock.settimeout(orig_timeout)
    except httplib.BadStatusLine as e:
        # Server closed the connection or sent garbage for the status line.
        xlog.warn("direct_handler.fetch bad status line:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    except Exception as e:
        xlog.warn("direct_handler.fetch:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    return response
def req_config_handler(self):
    """Web-control endpoint for reading and writing the user config.

    cmd=get_config returns user_config.user_special as JSON;
    cmd=set_config validates and stores the posted settings, persists them,
    and reloads every subsystem that depends on them.
    Always answers with a small JSON status document.
    """
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''
    appid_updated = False

    try:
        if reqs['cmd'] == ['get_config']:
            # Serialize the user_special object via its attribute dict.
            data = json.dumps(user_config.user_special, default=lambda o: o.__dict__)
        elif reqs['cmd'] == ['set_config']:
            appids = self.postvars['appid'][0]
            if appids != user_config.user_special.appid:
                if appids:
                    # Reject the whole request if any appid fails the probe.
                    fail_appid_list = test_appid.test_appids(appids)
                    if len(fail_appid_list):
                        fail_appid = "|".join(fail_appid_list)
                        return self.send_response('text/html', '{"res":"fail", "reason":"appid fail:%s"}' % fail_appid)
                appid_updated = True
                user_config.user_special.appid = appids
            user_config.user_special.password = self.postvars['password'][0]

            user_config.user_special.proxy_enable = self.postvars['proxy_enable'][0]
            user_config.user_special.proxy_type = self.postvars['proxy_type'][0]
            user_config.user_special.proxy_host = self.postvars['proxy_host'][0]
            user_config.user_special.proxy_port = self.postvars['proxy_port'][0]
            if not user_config.user_special.proxy_port:
                # Empty string from the form becomes numeric 0.
                user_config.user_special.proxy_port = 0
            user_config.user_special.proxy_user = self.postvars['proxy_user'][0]
            user_config.user_special.proxy_passwd = self.postvars['proxy_passwd'][0]

            user_config.user_special.host_appengine_mode = self.postvars['host_appengine_mode'][0]

            use_ipv6 = int(self.postvars['use_ipv6'][0])
            if user_config.user_special.use_ipv6 != use_ipv6:
                if use_ipv6:
                    # Only enable IPv6 if connectivity actually works.
                    if not check_ip.check_ipv6():
                        xlog.warn("Enable Ipv6 but check failed.")
                        return self.send_response('text/html', '{"res":"fail", "reason":"IPv6 fail"}')
                user_config.user_special.use_ipv6 = use_ipv6

            # Persist, then reload everything that caches these settings.
            user_config.save()
            config.load()
            appid_manager.reset_appid()
            import connect_manager
            connect_manager.load_proxy_config()
            connect_manager.https_manager.load_config()
            if appid_updated:
                # Old connections are bound to the old appid host.
                connect_manager.https_manager.clean_old_connection()

            google_ip.reset()
            check_ip.load_proxy_config()

            data = '{"res":"success"}'
            self.send_response('text/html', data)
            #http_request("http://127.0.0.1:8085/init_module?module=gae_proxy&cmd=restart")
            return
    except Exception as e:
        xlog.exception("req_config_handler except:%s", e)
        data = '{"res":"fail", "except":"%s"}' % e
    # Reached on get_config and on exception: send whatever `data` holds.
    self.send_response('text/html', data)
def adjust_scan_thread_num(self, max_scan_ip_thread_num=None):
    """Recompute the ip-scan thread count, optionally raising/lowering the cap.

    With auto-adjust off (or fewer than 100 gws ips known) the cap is used
    directly; otherwise the count scales with the 100th-best handshake time.
    When the count changes, a new search round is kicked off.
    """
    if max_scan_ip_thread_num:
        self.max_scan_ip_thread_num = max_scan_ip_thread_num

    cap = self.max_scan_ip_thread_num
    if not self.auto_adjust_scan_ip_thread_num or len(self.gws_ip_list) < 100:
        new_thread_num = cap
    else:
        try:
            # Use the 100th-fastest ip's handshake time as the quality signal:
            # the slower it is, the more scanning threads we allow.
            ref_ip = self.gws_ip_list[99]
            ref_handshake = self.ip_dict[ref_ip]['handshake_time']
            new_thread_num = int((ref_handshake - 200) / 2 * cap / 50)
        except Exception as e:
            xlog.warn("adjust_scan_thread_num fail:%r", e)
            return

    if new_thread_num > cap:
        new_thread_num = cap

    if new_thread_num != self.scan_ip_thread_num:
        xlog.info("Adjust scan thread num from %d to %d",
                  self.scan_ip_thread_num, new_thread_num)
        self.scan_ip_thread_num = new_thread_num
        self.search_more_google_ip()
def start_scan_all_exist_ip(self):
    """Launch a background rescan of every known ip, unless one is already running."""
    if getattr(self, "scan_all_ip_thread", None):
        # A previous scan thread is still recorded; refuse to start another.
        xlog.warn("scan all exist ip is running")
        return

    worker = threading.Thread(target=self.scan_all_exist_ip)
    self.scan_all_ip_thread = worker
    worker.start()
def save(self):
    """Write the user's gae_proxy settings to data/gae_proxy/config.ini.

    Sections: [gae] (only when an appid is set), [proxy], [hosts] (only
    when appengine mode differs from "gae"), and [google_ip] entries that
    differ from the defaults in proxy.ini. Failures are logged, not raised.
    """
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))
    try:
        # BUGFIX: use a context manager so the file handle is closed even
        # when a write raises; the original leaked it on exception.
        with open(CONFIG_USER_FILENAME, 'w') as f:
            if self.user_special.appid != "":
                f.write("[gae]\n")
                f.write("appid = %s\n" % self.user_special.appid)
                f.write("password = %s\n\n" % self.user_special.password)

            f.write("[proxy]\n")
            f.write("enable = %s\n" % self.user_special.proxy_enable)
            f.write("type = %s\n" % self.user_special.proxy_type)
            f.write("host = %s\n" % self.user_special.proxy_host)
            f.write("port = %s\n" % self.user_special.proxy_port)
            f.write("user = %s\n" % self.user_special.proxy_user)
            f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)

            if self.user_special.host_appengine_mode != "gae":
                f.write("[hosts]\n")
                f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
                f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)

            f.write("[google_ip]\n")

            # Only persist values that differ from the shipped defaults.
            if int(self.user_special.auto_adjust_scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num'):
                f.write("auto_adjust_scan_ip_thread_num = %d\n\n" % int(self.user_special.auto_adjust_scan_ip_thread_num))
            if int(self.user_special.scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num'):
                f.write("max_scan_ip_thread_num = %d\n\n" % int(self.user_special.scan_ip_thread_num))
            if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint('google_ip', 'use_ipv6'):
                f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))
    except:
        xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
def forward_local(self):
    """Replay the current request against the local server named in Host.

    Used when a browser mistakenly routes localhost traffic through the
    proxy: re-issue the request with a plain HTTP client and copy the
    status line, headers, and body back to the browser.
    """
    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    if not host_ip:
        # BUGFIX: a Host header with no explicit port ("localhost") left
        # host_ip empty and put the hostname into `port`, so int(port)
        # raised ValueError. Default to the standard HTTP port.
        host_ip, port = host, 80
    http_client = simple_http_client.HTTP_client((host_ip, int(port)))

    request_headers = dict((k.title(), v) for k, v in self.headers.items())

    payload = b''
    if 'Content-Length' in request_headers:
        try:
            payload_len = int(request_headers.get('Content-Length', 0))
            payload = self.rfile.read(payload_len)
        except Exception as e:
            xlog.warn('forward_local read payload failed:%s', e)
            return

    # Rebuild "path?query" from the parsed request url.
    self.parsed_url = urlparse.urlparse(self.path)
    if len(self.parsed_url[4]):
        path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
    else:
        path = self.parsed_url[2]

    content, status, response = http_client.request(self.command, path, request_headers, payload)
    if not status:
        xlog.warn("forward_local fail")
        return

    out_list = []
    out_list.append("HTTP/1.1 %d\r\n" % status)
    for key, value in response.getheaders():
        key = key.title()
        out_list.append("%s: %s\r\n" % (key, value))
    out_list.append("\r\n")
    out_list.append(content)

    self.wfile.write("".join(out_list))
def do_POST(self):
    """Dispatch web-control POST requests after a loose same-origin check.

    Parses the posted form into self.postvars, then routes by path to the
    deploy/config/scan_ip/importip handlers; unknown paths get a 404.
    """
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        # BUGFIX: the original called the non-existent str.startswitch(),
        # which raised AttributeError; the bare `except: pass` then swallowed
        # it, so non-local Referers were never actually refused.
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)

    # Parse the request body into postvars, tolerating any malformed input.
    try:
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def forward_local(self):
    """Relay the current request to the local host:port from the Host header
    and stream the reply (status line, headers, body) back to the client."""
    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    http_client = simple_http_client.HTTP_client((host_ip, int(port)))

    request_headers = {k.title(): v for k, v in self.headers.items()}

    payload = b''
    if 'Content-Length' in request_headers:
        try:
            body_len = int(request_headers.get('Content-Length', 0))
            payload = self.rfile.read(body_len)
        except Exception as e:
            xlog.warn('forward_local read payload failed:%s', e)
            return

    # Join path and query back together when a query string is present.
    self.parsed_url = urlparse.urlparse(self.path)
    path = self.parsed_url[2]
    if len(self.parsed_url[4]):
        path = '?'.join([path, self.parsed_url[4]])

    content, status, response = http_client.request(self.command, path, request_headers, payload)
    if not status:
        xlog.warn("forward_local fail")
        return

    out_list = ["HTTP/1.1 %d\r\n" % status]
    for key, value in response.getheaders():
        out_list.append("%s: %s\r\n" % (key.title(), value))
    out_list.append("\r\n")
    out_list.append(content)
    self.wfile.write("".join(out_list))
def remove_ip_process(self):
    """Worker thread: drain to_remove_ip_list, re-testing each candidate.

    Ips that still pass the appspot check are restored via add_ip; when the
    network itself is down the candidate is re-queued and the worker exits.
    Always decrements remove_ip_thread_num on the way out.
    """
    try:
        while connect_control.keep_running:
            try:
                ip_str = self.to_remove_ip_list.get_nowait()
            except:
                # Queue empty: nothing left to process.
                break

            result = check_ip.test(ip_str)
            if result and result.appspot_ok:
                # Still usable -- put it back instead of removing it.
                self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip_str)
                continue

            if not check_ip.network_is_ok():
                # Can't trust a failed test while offline; re-queue and stop.
                self.to_remove_ip_list.put(ip_str)
                xlog.warn("network is unreachable. check your network connection.")
                return

            xlog.info("real remove ip:%s ", ip_str)
            self.iplist_need_save = 1
    finally:
        self.remove_ip_thread_num_lock.acquire()
        self.remove_ip_thread_num -= 1
        self.remove_ip_thread_num_lock.release()
def process_appid_not_exist(self, appid, ip):
    """Decide whether an "appid not exist" error is real or the ip's fault.

    Probe the ip with a known public appid: if the probe succeeds the ip is
    fine and the appid really is gone; otherwise blame and drop the ip.
    """
    if not check_ip.test_gae_ip(ip, "xxnet-1"):
        xlog.warn("process_appid_not_exist, remove ip:%s", ip)
        from google_ip import google_ip
        google_ip.report_connect_fail(ip, force_remove=True)
        return
    self.set_appid_not_exist(appid)
def load_ip_range(self):
    """Parse the ip-range source into numeric [begin, end] pairs.

    Populates ip_range_map/ip_range_list/ip_range_index and accumulates
    candidate_amount_ip (total number of addresses covered). Blank lines
    and '#' comments are skipped; malformed lines are logged and ignored.
    """
    self.ip_range_map = {}
    self.ip_range_list = []
    self.ip_range_index = []
    self.candidate_amount_ip = 0

    for line in self.load_range_content().splitlines():
        if not line or line.startswith('#'):
            continue

        try:
            begin, end = ip_utils.split_ip(line)
            nbegin = ip_utils.ip_string_to_num(begin)
            nend = ip_utils.ip_string_to_num(end)
            if not nbegin or not nend or nend < nbegin:
                xlog.warn("load ip range:%s fail", line)
                continue
        except Exception as e:
            xlog.exception("load ip range:%s fail:%r", line, e)
            continue

        # Key each range by the running address count so a random number in
        # [0, candidate_amount_ip) can be mapped back to a range.
        self.ip_range_map[self.candidate_amount_ip] = [nbegin, nend]
        self.ip_range_list.append([nbegin, nend])
        self.ip_range_index.append(self.candidate_amount_ip)
        self.candidate_amount_ip += nend - nbegin

    self.ip_range_index.sort()
def remove_ip_process(self):
    """Worker that drains to_remove_ip_list, restoring ips that still work.

    Stops early when the queue is empty, the module is shutting down, or
    the network is unreachable (the candidate is then re-queued). The
    worker-count bookkeeping is always performed on exit.
    """
    try:
        while connect_control.keep_running:
            try:
                candidate = self.to_remove_ip_list.get_nowait()
            except:
                break

            probe = check_ip.test(candidate)
            if probe and probe.appspot_ok:
                # The ip recovered -- keep it instead of removing it.
                self.add_ip(candidate, probe.handshake_time, probe.domain, probe.server_type)
                xlog.debug("remove ip process, restore ip:%s", candidate)
                continue

            if not check_ip.network_is_ok():
                # The failure may be our connectivity, not the ip's fault.
                self.to_remove_ip_list.put(candidate)
                xlog.warn("network is unreachable. check your network connection.")
                return

            xlog.info("real remove ip:%s ", candidate)
            self.iplist_need_save = 1
    finally:
        with self.remove_ip_thread_num_lock:
            self.remove_ip_thread_num -= 1
def load(self):
    """Load defaults from proxy.ini, then overlay the user's config.ini.

    Each optional section is read in its own try block so a missing value
    never blocks the rest. Returns silently when either file is absent.
    """
    # Relax the option regex so values may contain '=' and spacing varies.
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')

    self.DEFAULT_CONFIG = ConfigParser.ConfigParser()
    DEFAULT_CONFIG_FILENAME = os.path.abspath(
        os.path.join(current_path, 'proxy.ini'))

    self.USER_CONFIG = ConfigParser.ConfigParser()
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))

    try:
        if os.path.isfile(DEFAULT_CONFIG_FILENAME):
            self.DEFAULT_CONFIG.read(DEFAULT_CONFIG_FILENAME)
            self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
        else:
            # No shipped defaults: nothing sane to load.
            return

        if os.path.isfile(CONFIG_USER_FILENAME):
            self.USER_CONFIG.read(CONFIG_USER_FILENAME)
        else:
            # No user overrides: defaults stand.
            return

        try:
            self.user_special.appid = self.USER_CONFIG.get('gae', 'appid')
            self.user_special.password = self.USER_CONFIG.get('gae', 'password')
        except:
            pass

        try:
            self.user_special.host_appengine_mode = self.USER_CONFIG.get('hosts', 'appengine.google.com')
        except:
            pass

        # NOTE(review): these three read from the module-level config.CONFIG,
        # not self.USER_CONFIG like the surrounding lines -- confirm this is
        # intentional (it looks like it pulls the merged runtime config).
        try:
            self.user_special.scan_ip_thread_num = config.CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
        except:
            self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num')

        try:
            self.user_special.auto_adjust_scan_ip_thread_num = config.CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num')
        except:
            pass
        try:
            self.user_special.use_ipv6 = config.CONFIG.getint('google_ip', 'use_ipv6')
        except:
            pass

        # Proxy settings are mandatory in the user file once it exists; a
        # missing key aborts the remaining loads via the outer except.
        self.user_special.proxy_enable = self.USER_CONFIG.get('proxy', 'enable')
        self.user_special.proxy_type = self.USER_CONFIG.get('proxy', 'type')
        self.user_special.proxy_host = self.USER_CONFIG.get('proxy', 'host')
        self.user_special.proxy_port = self.USER_CONFIG.get('proxy', 'port')
        self.user_special.proxy_user = self.USER_CONFIG.get('proxy', 'user')
        self.user_special.proxy_passwd = self.USER_CONFIG.get('proxy', 'passwd')
    except Exception as e:
        xlog.warn("User_config.load except:%s", e)
def set_appid_not_exist(self, appid):
    """Mark `appid` as non-existent and drop it from the rotation lists.

    Idempotent: an appid already recorded as missing is left alone.
    """
    xlog.warn("APPID_manager, set_appid_not_exist %s", appid)
    self.lock.acquire()
    try:
        if appid not in self.not_exist_appids:
            self.not_exist_appids.append(appid)
            # BUGFIX: list.remove raises ValueError if another path already
            # removed the appid; the original had no except clause, so the
            # error escaped mid-update. Swallow it like the sibling
            # report_not_exist() does.
            try:
                config.GAE_APPIDS.remove(appid)
            except ValueError:
                pass
            try:
                self.working_appid_list.remove(appid)
            except ValueError:
                pass
    finally:
        self.lock.release()
def req_deploy_handler(self):
    """Web-control endpoint driving the server-uploader subprocess.

    cmd=deploy starts uploader.py with the posted credentials (refused if a
    deploy is already running), cmd=cancel kills a running deploy, and
    cmd=get_log returns the deploy status plus the upload.log contents.
    Responds with a small JSON document in every case.
    """
    global deploy_proc
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''

    log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'upload.log'))
    time_now = datetime.datetime.today().strftime('%H:%M:%S-%a/%d/%b/%Y')

    if reqs['cmd'] == ['deploy']:
        appid = self.postvars['appid'][0]

        # poll() is None while the previous uploader is still running.
        if deploy_proc and deploy_proc.poll() == None:
            xlog.warn("deploy is running, request denied.")
            data = '{"res":"deploy is running", "time":"%s"}' % (time_now)
        else:
            try:
                # Start from a clean log so get_log only shows this run.
                if os.path.isfile(log_path):
                    os.remove(log_path)
                script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'uploader.py'))
                email = self.postvars['email'][0]
                passwd = self.postvars['passwd'][0]
                rc4_passwd = self.postvars['rc4_passwd'][0]
                deploy_proc = subprocess.Popen([sys.executable, script_path, appid, email, passwd, rc4_passwd])
                xlog.info("deploy begin.")
                data = '{"res":"success", "time":"%s"}' % time_now
            except Exception as e:
                data = '{"res":"%s", "time":"%s"}' % (e, time_now)

    elif reqs['cmd'] == ['cancel']:
        if deploy_proc and deploy_proc.poll() == None:
            deploy_proc.kill()
            data = '{"res":"deploy is killed", "time":"%s"}' % (time_now)
        else:
            data = '{"res":"deploy is not running", "time":"%s"}' % (time_now)

    elif reqs['cmd'] == ['get_log']:
        if deploy_proc and os.path.isfile(log_path):
            with open(log_path, "r") as f:
                content = f.read()
        else:
            content = ""

        status = 'init'
        if deploy_proc:
            if deploy_proc.poll() == None:
                status = 'running'
            else:
                status = 'finished'

        data = json.dumps({'status':status,'log':content, 'time':time_now})

    self.send_response('text/html', data)
def do_METHOD(self):
    """Route an incoming plain-HTTP request to the right handler.

    Order: web-control UI on 127.0.0.1:LISTEN_PORT, localhost forwarding,
    the proxy-detect probe url, then per-host policy (GAE tunnelling or a
    301 redirect to https), defaulting to the GAE agent.
    """
    touch_active()

    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
        # Request addressed to the proxy itself: hand it to the control UI.
        controler = web_control.ControlHandler(self.client_address, self.headers, self.command, self.path, self.rfile, self.wfile)
        if self.command == "GET":
            return controler.do_GET()
        elif self.command == "POST":
            return controler.do_POST()
        else:
            xlog.warn("method not defined: %s", self.command)
            return

    # Normalize self.path to an absolute url, or recover host from the url.
    if self.path[0] == '/' and host:
        self.path = 'http://%s%s' % (host, self.path)
    elif not host and '://' in self.path:
        host = urlparse.urlparse(self.path).netloc

    if host.startswith("127.0.0.1") or host.startswith("localhost"):
        xlog.warn("Your browser forward localhost to proxy.")
        return self.forward_local()

    if self.path == "http://www.twitter.com/xxnet":
        # for web_ui status page
        # auto detect browser proxy setting is work
        data = "OK"
        return self.wfile.write(
            'HTTP/1.1 200\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\n\r\n%s'
            % (len(data), data))

    self.parsed_url = urlparse.urlparse(self.path)

    if host in config.HOSTS_GAE:
        return self.do_AGENT()

    # FWD/DIRECT hosts are bounced to https so the CONNECT path handles them.
    if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
        return self.wfile.write(
            ('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    if host.endswith(config.HOSTS_GAE_ENDSWITH):
        return self.do_AGENT()

    if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_DIRECT_ENDSWITH):
        return self.wfile.write(
            ('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    return self.do_AGENT()
def report_out_of_quota(self, appid):
    """Record that `appid` hit its quota and pull it from the working list.

    Best effort: the bare except absorbs a ValueError when the appid was
    already removed from working_appid_list.
    """
    xlog.warn("report_out_of_quota:%s", appid)
    self.lock.acquire()
    try:
        if appid not in self.out_of_quota_appids:
            self.out_of_quota_appids.append(appid)
        self.working_appid_list.remove(appid)
    except:
        pass
    finally:
        self.lock.release()
def add_ip(self, ip_str, handshake_time, domain=None, server='', fail_times=0):
    """Insert or refresh an ip record; returns True only for a new insert.

    Existing entries get their handshake/fail counters refreshed; new
    entries are added to ip_dict (and gws_ip_list for gws servers) and
    flagged for saving.
    """
    if not isinstance(ip_str, basestring):
        xlog.error("add_ip input")
        return

    if config.USE_IPV6 and ":" not in ip_str:
        # IPv6-only mode: refuse plain IPv4 addresses.
        xlog.warn("add %s but ipv6", ip_str)
        return

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        if ip_str in self.ip_dict:
            # Refresh the existing record instead of inserting.
            self.ip_dict[ip_str]['handshake_time'] = handshake_time
            self.ip_dict[ip_str]['fail_times'] = fail_times
            # NOTE(review): this checks 'fail_time' (last-failure timestamp)
            # while the duplicate copy of add_ip elsewhere in this file skips
            # the good_ip_num bump entirely -- confirm which accounting is
            # intended.
            if self.ip_dict[ip_str]['fail_time'] > 0:
                self.ip_dict[ip_str]['fail_time'] = 0
                self.good_ip_num += 1
            self.append_ip_history(ip_str, handshake_time)
            return False

        self.iplist_need_save = 1
        self.good_ip_num += 1

        self.ip_dict[ip_str] = {
            'handshake_time': handshake_time,
            "fail_times": fail_times,
            "transfered_data": 0,
            'data_active': 0,
            'domain': domain,
            'server': server,
            "history": [[time.time(), handshake_time]],
            "fail_time": 0,
            "success_time": 0,
            "get_time": 0,
            "links": 0
        }

        if 'gws' in server:
            self.gws_ip_list.append(ip_str)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()
    return False
def head_request(self, ssl_sock):
    """Keep-alive probe: issue HEAD /_gh/ on the connection.

    Returns True when the appid answers 200, False otherwise (no appid,
    public appid, or any request failure). Assigns an appid to fresh
    connections as a side effect.
    """
    if ssl_sock.host == '':
        # Fresh connection: bind it to an appid now.
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): "xxnet-" is 6 chars, so [7:] skips the first digit and
    # "xxnet-1"[7:] is "" (isdigit() False) -- confirm whether [6:] was meant.
    if ssl_sock.appid.startswith(
            "xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            xlog.warn("head send len:%d %d", ret, len(data))
        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            raise Exception("app check fail %r" % status)
        return True
    except httplib.BadStatusLine as e:
        # Typically the server closed an idle connection.
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.warn("%s head %s request fail:%r", ssl_sock.ip, ssl_sock.appid, e)
        return False
    finally:
        if response:
            response.close()
def ssl_closed(self, ip, reason=""):
    """Bookkeeping when an ssl link to `ip` is closed: decrement its
    open-link count and record the close event in the ip's history."""
    self.ip_lock.acquire()
    try:
        record = self.ip_dict.get(ip)
        if record is not None:
            if record['links']:
                record['links'] -= 1
            self.append_ip_history(ip, "C[%s]"%reason)
            xlog.warn("ssl_closed %s", ip)
    except Exception as e:
        xlog.error("ssl_closed %s err:%s", ip, e)
    finally:
        self.ip_lock.release()
def ssl_closed(self, ip, reason=""):
    """Decrement the ip's open-link count and log the close in its history."""
    #xlog.debug("%s ssl_closed:%s", ip, reason)
    self.ip_lock.acquire()
    try:
        if ip in self.ip_dict:
            # Guard against going below zero if closes outnumber opens.
            if self.ip_dict[ip]['links']:
                self.ip_dict[ip]['links'] -= 1
            self.append_ip_history(ip, "C[%s]" % reason)
            xlog.warn("ssl_closed %s", ip)
    except Exception as e:
        xlog.error("ssl_closed %s err:%s", ip, e)
    finally:
        self.ip_lock.release()
def get_appid(self):
    """Pick a usable appid at random.

    When the working pool is exhausted it is refilled from the configured
    appids, at most once per minute. Returns None while waiting for the
    reset window, or when no appid is configured at all.
    """
    if len(self.working_appid_list) == 0:
        if time.time() - self.last_reset_time < 60:
            xlog.warn("all appid out of quota, need 1 min to reset")
            return None
        else:
            xlog.warn("reset appid")
            self.lock.acquire()
            self.working_appid_list = list(config.GAE_APPIDS)
            self.out_of_quota_appids = []
            self.lock.release()
            self.last_reset_time = time.time()
            # BUGFIX: if no appid is configured the pool stays empty after a
            # reset and random.choice([]) raised IndexError.
            if len(self.working_appid_list) == 0:
                return None

    return random.choice(self.working_appid_list)
def report_not_exist(self, appid):
    """Handle a server report that `appid` does not exist: purge it from
    every list (best effort) and trigger a reset when none remain."""
    xlog.warn("APPID_manager, report_not_exist %s", appid)
    with self.lock:
        try:
            config.GAE_APPIDS.remove(appid)
            self.not_exist_appids.append(appid)
            self.working_appid_list.remove(appid)
        except:
            # Already removed elsewhere; nothing more to do.
            pass

    if not self.working_appid_list:
        self.reset_appid()
def do_GET(self):
    """Serve the PAC file, rewriting its proxy addresses to this host.

    Absolute-url requests get an informational page (or a 1px gif for image
    paths), path traversal and any path other than /proxy.pac get a 404,
    and a stale PAC file triggers a background refresh before serving.
    """
    xlog.info('PAC from:%s %s %s ', self.address_string(), self.command, self.path)

    path = urlparse.urlparse(self.path).path  # '/proxy.pac'
    filename = os.path.normpath('./' + path)  # proxy.pac

    if self.path.startswith(('http://', 'https://')):
        # Browser sent a full url: it is using this port as a proxy.
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'

        if filename.endswith(('.jpg', '.gif', '.jpeg', '.bmp')):
            data += b'Content-Type: image/gif\r\n\r\n' + self.onepixel
        else:
            data += b'\r\n This is the Pac server, not proxy port, use 8087 as proxy port.'

        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
        return

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path)
        return

    if filename != 'proxy.pac':
        xlog.warn("pac_server GET %s fail", filename)
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        return

    mimetype = 'text/plain'
    # Regenerate asynchronously when forced (?flush) or when the served
    # file is older than the configured expiry.
    if self.path.endswith('.pac?flush') or time.time() - os.path.getmtime(
            get_serving_pacfile()) > config.PAC_EXPIRED:
        thread.start_new_thread(PacUtil.update_pacfile, (user_pacfile, ))

    pac_filename = get_serving_pacfile()
    with open(pac_filename, 'rb') as fp:
        data = fp.read()

    # Point the PAC's proxy entries at the host the client actually used.
    host = self.headers.getheader('Host')
    host, _, port = host.rpartition(":")
    gae_proxy_proxy = host + ":" + str(config.LISTEN_PORT)
    pac_proxy = host + ":" + str(config.PAC_PORT)

    data = data.replace(gae_proxy_listen, gae_proxy_proxy)
    data = data.replace(pac_listen, pac_proxy)

    self.wfile.write(
        ('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n'
         % (mimetype, len(data))).encode())
    self.wfile.write(data)
def head_request(self, ssl_sock):
    """Keep-alive probe (duplicate of the other head_request in this file):
    HEAD /_gh/ against the connection's appid host.

    Returns True on a 200, False for missing appid, public appid, or any
    failure; binds an appid to a fresh connection as a side effect.
    """
    if ssl_sock.host == '':
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): "xxnet-" is 6 chars; [7:] drops the first digit, so
    # single-digit public appids never match -- confirm whether [6:] was meant.
    if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            # Partial send: log it; the probe will likely fail below.
            xlog.warn("head send len:%d %d", ret, len(data))
        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            raise Exception("app check fail %r" % status)
        return True
    except httplib.BadStatusLine as e:
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.warn("%s head %s request fail:%r", ssl_sock.ip, ssl_sock.appid, e)
        return False
    finally:
        if response:
            response.close()
def network_is_ok():
    """Return the cached network-reachability flag, probing github.com over
    https when the cache is stale. Only one probe runs at a time.

    NOTE(review): `last_ok_time` is read here but never assigned in this
    function and is missing from the `global` statement -- confirm it is
    maintained elsewhere in the module.
    """
    global checking_lock, checking_num, network_ok, last_check_time, check_network_interval
    if time.time() - last_check_time < check_network_interval:
        return network_ok

    if time.time() - last_ok_time < check_network_interval:
        return True

    if checking_num > 0:
        # Another thread is probing; report the last known state.
        return network_ok

    if config.PROXY_ENABLE:
        # Route the probe through the configured upstream proxy.
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    try:
        conn = httplib.HTTPSConnection("github.com", 443, timeout=30)
        header = {"user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
                  "accept":"application/json, text/javascript, */*; q=0.01",
                  "accept-encoding":"gzip, deflate, sdch",
                  "accept-language":'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
                  "connection":"keep-alive"
                  }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            xlog.debug("network is ok")
            network_ok = True
            last_check_time = time.time()
            return True
    except:
        pass
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            # Undo the socks monkey-patch.
            socket.socket = default_socket
            xlog.debug("restore socket")

    xlog.warn("network fail.")
    network_ok = False
    last_check_time = time.time()
    return False
def network_is_ok():
    """Return the cached network-reachability flag, probing github.com over
    https when the cache is stale (duplicate of the other copy in this file,
    minus its `last_ok_time` fast path)."""
    global checking_lock, checking_num, network_ok, last_check_time, check_network_interval
    if time.time() - last_check_time < check_network_interval:
        return network_ok

    if checking_num > 0:
        # Another thread is already probing; report the last known state.
        return network_ok

    if config.PROXY_ENABLE:
        # Route the probe through the configured upstream proxy.
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    try:
        conn = httplib.HTTPSConnection("github.com", 443, timeout=30)
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-encoding": "gzip, deflate, sdch",
            "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
            "connection": "keep-alive"
        }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            xlog.debug("network is ok")
            network_ok = True
            last_check_time = time.time()
            return True
    except:
        pass
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            # Undo the socks monkey-patch.
            socket.socket = default_socket
            xlog.debug("restore socket")

    xlog.warn("network fail.")
    network_ok = False
    last_check_time = time.time()
    return False
def save(self):
    """Write the user's gae_proxy settings to data/gae_proxy/config.ini
    (duplicate of the other save() in this file).

    NOTE(review): the file handle is not closed if a write raises -- the
    close() is only reached on success; consider a `with` block.
    """
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))
    try:
        f = open(CONFIG_USER_FILENAME, 'w')

        if self.user_special.appid != "":
            f.write("[gae]\n")
            f.write("appid = %s\n" % self.user_special.appid)
            f.write("password = %s\n\n" % self.user_special.password)

        f.write("[proxy]\n")
        f.write("enable = %s\n" % self.user_special.proxy_enable)
        f.write("type = %s\n" % self.user_special.proxy_type)
        f.write("host = %s\n" % self.user_special.proxy_host)
        f.write("port = %s\n" % self.user_special.proxy_port)
        f.write("user = %s\n" % self.user_special.proxy_user)
        f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)

        if self.user_special.host_appengine_mode != "gae":
            f.write("[hosts]\n")
            f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
            f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)

        f.write("[google_ip]\n")

        # Only persist values that differ from the shipped defaults.
        if int(self.user_special.auto_adjust_scan_ip_thread_num
               ) != self.DEFAULT_CONFIG.getint(
                   'google_ip', 'auto_adjust_scan_ip_thread_num'):
            f.write("auto_adjust_scan_ip_thread_num = %d\n\n" %
                    int(self.user_special.auto_adjust_scan_ip_thread_num))
        if int(self.user_special.
               scan_ip_thread_num) != self.DEFAULT_CONFIG.getint(
                   'google_ip', 'max_scan_ip_thread_num'):
            f.write("max_scan_ip_thread_num = %d\n\n" %
                    int(self.user_special.scan_ip_thread_num))
        if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint(
                'google_ip', 'use_ipv6'):
            f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))

        f.close()
    except:
        xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
def update_ip(self, ip, handshake_time):
    """Record a successful handshake for `ip` and refresh network status.

    The stored handshake time rises by at most 500ms per update so one
    lossy sample cannot sink a good ip; fail counters are cleared and the
    ip list is flagged for saving.
    """
    if not isinstance(ip, basestring):
        xlog.error("set_ip input")
        return

    handshake_time = int(handshake_time)
    if handshake_time < 5: # that's impossible
        xlog.warn("%s handshake:%d impossible", ip, 1000 * handshake_time)
        return

    time_now = time.time()
    # A successful handshake doubles as proof the network is up.
    check_ip.network_stat = "OK"
    check_ip.last_check_time = time_now
    check_ip.continue_fail_count = 0

    self.ip_lock.acquire()
    try:
        if ip in self.ip_dict:
            # Case: some good ip, average handshake time is 300ms
            # some times ip package lost cause handshake time become 2000ms
            # this ip will not return back to good ip front until all become bad
            # There for, prevent handshake time increase too quickly.
            org_time = self.ip_dict[ip]['handshake_time']
            if handshake_time - org_time > 500:
                self.ip_dict[ip]['handshake_time'] = org_time + 500
            else:
                self.ip_dict[ip]['handshake_time'] = handshake_time

            self.ip_dict[ip]['success_time'] = time_now
            if self.ip_dict[ip]['fail_times'] > 0:
                # The ip is recovering: count it as good again.
                self.good_ip_num += 1
            self.ip_dict[ip]['fail_times'] = 0
            self.append_ip_history(ip, handshake_time)
            self.ip_dict[ip]["fail_time"] = 0

            self.iplist_need_save = 1

        #logging.debug("update ip:%s not exist", ip)
    except Exception as e:
        xlog.error("update_ip err:%s", e)
    finally:
        self.ip_lock.release()

    self.save_ip_list()
def do_METHOD(self):
    """Route an incoming plain-HTTP request (duplicate of the other
    do_METHOD in this file).

    Order: web-control UI on 127.0.0.1:LISTEN_PORT, localhost forwarding,
    the proxy-detect probe url, then per-host policy (GAE tunnelling or a
    301 redirect to https), defaulting to the GAE agent.
    """
    touch_active()

    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
        # Request addressed to the proxy itself: hand it to the control UI.
        controler = web_control.ControlHandler(self.client_address, self.headers, self.command, self.path, self.rfile, self.wfile)
        if self.command == "GET":
            return controler.do_GET()
        elif self.command == "POST":
            return controler.do_POST()
        else:
            xlog.warn("method not defined: %s", self.command)
            return

    # Normalize self.path to an absolute url, or recover host from the url.
    if self.path[0] == '/' and host:
        self.path = 'http://%s%s' % (host, self.path)
    elif not host and '://' in self.path:
        host = urlparse.urlparse(self.path).netloc

    if host.startswith("127.0.0.1") or host.startswith("localhost"):
        xlog.warn("Your browser forward localhost to proxy.")
        return self.forward_local()

    if self.path == "http://www.twitter.com/xxnet":
        # for web_ui status page
        # auto detect browser proxy setting is work
        data = "OK"
        return self.wfile.write('HTTP/1.1 200\r\nAccess-Control-Allow-Origin: *\r\nContent-Length: %d\r\n\r\n%s' %(len(data), data) )

    self.parsed_url = urlparse.urlparse(self.path)

    if host in config.HOSTS_GAE:
        return self.do_AGENT()

    # FWD/DIRECT hosts are bounced to https so the CONNECT path handles them.
    if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    if host.endswith(config.HOSTS_GAE_ENDSWITH):
        return self.do_AGENT()

    if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_DIRECT_ENDSWITH):
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    return self.do_AGENT()
def add_ip(self, ip_str, handshake_time, domain=None, server="", fail_times=0):
    """Insert or refresh an ip record; returns True only for a new insert
    (duplicate of the other add_ip in this file).

    NOTE(review): unlike the other copy, refreshing an existing entry here
    clears fail_time unconditionally and never adjusts good_ip_num --
    confirm which accounting is intended.
    """
    if not isinstance(ip_str, basestring):
        xlog.error("add_ip input")
        return

    if config.USE_IPV6 and ":" not in ip_str:
        # IPv6-only mode: refuse plain IPv4 addresses.
        xlog.warn("add %s but ipv6", ip_str)
        return

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        if ip_str in self.ip_dict:
            # Refresh the existing record instead of inserting.
            self.ip_dict[ip_str]["handshake_time"] = handshake_time
            self.ip_dict[ip_str]["fail_times"] = fail_times
            self.ip_dict[ip_str]["fail_time"] = 0
            self.append_ip_history(ip_str, handshake_time)
            return False

        self.iplist_need_save = 1
        self.good_ip_num += 1

        self.ip_dict[ip_str] = {
            "handshake_time": handshake_time,
            "fail_times": fail_times,
            "transfered_data": 0,
            "data_active": 0,
            "domain": domain,
            "server": server,
            "history": [[time.time(), handshake_time]],
            "fail_time": 0,
            "success_time": 0,
            "get_time": 0,
            "links": 0,
        }

        if "gws" in server:
            self.gws_ip_list.append(ip_str)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()

    return False
def do_CONNECT_FWD(self):
    """socket forward for http CONNECT command"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    xlog.info('FWD %s %s:%d ', self.command, host, port)

    # Uploads to appengine need a fresher TCP connection than gws traffic.
    if host in ("appengine.google.com", "www.google.com"):
        max_sock_age = 5  # gae_proxy upload to appengine is slow, it need more 'fresh' connection.
    else:
        max_sock_age = 10  # gws connect can be used after tcp connection created 15 s

    try:
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
        first_chunk = self.connection.recv(1024)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        return

    remote = forwork_manager.create_connection(host=host, port=port, sock_life=max_sock_age)
    if remote is None:
        self.connection.close()
        xlog.warn('FWD %s %s:%d create_connection fail', self.command, host, port)
        return

    try:
        # Replay whatever the client already pushed before we had a tunnel.
        if first_chunk:
            remote.send(first_chunk)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        remote.close()
        return

    # reset timeout default to avoid long http upload failure, but it will delay timeout retry :(
    remote.settimeout(None)

    forwork_manager.forward_socket(self.connection, remote, bufsize=self.bufsize)
    xlog.debug('FWD %s %s:%d with closed', self.command, host, port)
def do_GET(self):
    """Serve the PAC file (proxy.pac), rewriting its proxy endpoints to the
    Host header the client used; anything else gets a hint page or 404."""
    xlog.info('PAC from:%s %s %s ', self.address_string(), self.command, self.path)

    path = urlparse.urlparse(self.path).path # '/proxy.pac'
    filename = os.path.normpath('./' + path) # proxy.pac
    # An absolute-URL request means the browser used this port as an HTTP
    # proxy instead of a PAC server: reply with a hint (or a one-pixel image
    # for picture requests so pages don't render broken icons).
    if self.path.startswith(('http://', 'https://')):
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        if filename.endswith(('.jpg', '.gif', '.jpeg', '.bmp')):
            data += b'Content-Type: image/gif\r\n\r\n' + self.onepixel
        else:
            data += b'\r\n This is the Pac server, not proxy port, use 8087 as proxy port.'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
        return

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path )
        return

    # Only the PAC file itself is served from this port.
    if filename != 'proxy.pac':
        xlog.warn("pac_server GET %s fail", filename)
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        return

    mimetype = 'text/plain'
    # Regenerate the PAC in the background when explicitly flushed or stale;
    # this request still serves the current file.
    if self.path.endswith('.pac?flush') or time.time() - os.path.getmtime(get_serving_pacfile()) > config.PAC_EXPIRED:
        thread.start_new_thread(PacUtil.update_pacfile, (user_pacfile,))

    pac_filename = get_serving_pacfile()
    with open(pac_filename, 'rb') as fp:
        data = fp.read()

    # Rewrite the proxy endpoints in the PAC body to the hostname the client
    # actually used (it may not be 127.0.0.1, e.g. LAN access).
    host = self.headers.getheader('Host')
    host, _, port = host.rpartition(":")
    gae_proxy_proxy = host + ":" + str(config.LISTEN_PORT)
    pac_proxy = host + ":" + str(config.PAC_PORT)

    data = data.replace(gae_proxy_listen, gae_proxy_proxy)
    data = data.replace(pac_listen, pac_proxy)

    self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % (mimetype, len(data))).encode())
    self.wfile.write(data)
def do_POST(self):
    """Handle POST requests to the web-control UI.

    Only requests whose Referer is local (127.0.0.1 / localhost) are
    accepted; the body is parsed into self.postvars and the request is
    dispatched to the matching req_*_handler.
    """
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        # Refuse cross-site posts: only a locally served page may drive the UI.
        # BUG FIX: this read `netloc.startswitch("localhost")` — the typo raised
        # AttributeError, which the bare except below swallowed, so foreign
        # referers were never actually refused.
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        # Missing/unparsable Referer is tolerated (direct API calls).
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)

    try:
        # Parse the request body according to its declared content type.
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def simple_check_worker():
    """Probe network reachability with a cheap HEAD request to baidu.

    Returns True when any HTTP response arrives, False on failure.
    checking_num is bumped while the probe is in flight; the SOCKS
    monkey-patch is applied/undone around it when a proxy is configured.
    """
    global checking_lock, checking_num, network_stat, last_check_time

    probe_start = time.time()
    if config.PROXY_ENABLE:
        # Route the probe through the configured upstream proxy.
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()

    probe_headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
        "accept": "application/json, text/javascript, */*; q=0.01",
        "accept-encoding": "gzip, deflate, sdch",
        "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
        "connection": "keep-alive",
    }
    try:
        conn = httplib.HTTPConnection("www.baidu.com", 80, timeout=3)
        conn.request("HEAD", "/", headers=probe_headers)
        response = conn.getresponse()
        if response.status:
            last_check_time = time.time()
            report_network_ok()
            xlog.debug("network is ok, cost:%d ms", 1000 * (time.time() - probe_start))
            return True
    except Exception as e:
        xlog.warn("network fail:%r", e)
        network_stat = "Fail"
        last_check_time = time.time()
        return False
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            socket.socket = default_socket
            xlog.debug("restore socket")
def adjust_scan_thread_num(self):
    """Recompute how many IP-scan threads should run.

    With auto-adjust disabled, or with fewer than 100 known gws IPs, the
    configured maximum is used; otherwise the count scales with the
    handshake time of the 100th gws IP (a slow pool triggers more
    scanning). A new IP search is kicked off only when the number changes.
    """
    if not self.auto_adjust_scan_ip_thread_num or len(self.gws_ip_list) < 100:
        target_num = self.max_scan_ip_thread_num
    else:
        try:
            ref_ip = self.gws_ip_list[99]
            ref_handshake = self.ip_dict[ref_ip]['handshake_time']
            target_num = int( (ref_handshake - 200)/2 * self.max_scan_ip_thread_num/50 )
        except Exception as e:
            xlog.warn("adjust_scan_thread_num fail:%r", e)
            return

    # Never exceed the configured ceiling.
    if target_num > self.max_scan_ip_thread_num:
        target_num = self.max_scan_ip_thread_num

    if target_num != self.scan_ip_thread_num:
        xlog.info("Adjust scan thread num from %d to %d", self.scan_ip_thread_num, target_num)
        self.scan_ip_thread_num = target_num
        self.search_more_google_ip()
def add_ip(self, ip, handshake_time, domain=None, server='', fail_times=0):
    """Insert or refresh an IP record.

    True when the IP is new, False when an existing record was refreshed
    (or on swallowed internal error); None when input is rejected.
    """
    if not isinstance(ip, basestring):
        xlog.error("add_ip input")
        return
    if config.USE_IPV6 and ":" not in ip:
        xlog.warn("add %s but ipv6", ip)
        return

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        try:
            record = self.ip_dict[ip]
        except KeyError:
            record = None

        if record is not None:
            record['handshake_time'] = handshake_time
            record['fail_times'] = fail_times
            if record['fail_time'] > 0:
                # The IP was marked failed; clearing the mark returns it to
                # the good pool, so bump the good counter back up.
                record['fail_time'] = 0
                self.good_ip_num += 1
            self.append_ip_history(ip, handshake_time)
            return False

        self.iplist_need_save = 1
        self.good_ip_num += 1
        self.ip_dict[ip] = {'handshake_time': handshake_time,
                            "fail_times": fail_times,
                            "transfered_data": 0,
                            'data_active': 0,
                            'domain': domain,
                            'server': server,
                            "history": [[time.time(), handshake_time]],
                            "fail_time": 0,
                            "success_time": 0,
                            "get_time": 0,
                            "links": 0}

        if 'gws' in server:
            self.gws_ip_list.append(ip)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()
    return False
def do_AGENT(self):
    """Read the request payload (Content-Length or chunked transfer) and hand
    the request to gae_handler for fetching through GAE."""
    def get_crlf(rfile):
        # Consume the CRLF terminating each chunk; only warn on garbage.
        crlf = rfile.readline(2)
        if crlf != "\r\n":
            xlog.warn("chunk header read fail crlf")

    request_headers = dict((k.title(), v) for k, v in self.headers.items())

    payload = b''
    if 'Content-Length' in request_headers:
        try:
            payload_len = int(request_headers.get('Content-Length', 0))
            #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
            payload = self.rfile.read(payload_len)
        except NetWorkIOError as e:
            xlog.error('handle_method_urlfetch read payload failed:%s', e)
            return
    elif 'Transfer-Encoding' in request_headers:
        # chunked, used by facebook android client
        payload = ""
        while True:
            chunk_size_str = self.rfile.readline(65537)
            chunk_size_list = chunk_size_str.split(";")
            # The chunk-size line is hexadecimal; int(..., 0) parses the "0x" form.
            chunk_size = int("0x" + chunk_size_list[0], 0)
            if len(chunk_size_list) > 1 and chunk_size_list[1] != "\r\n":
                xlog.warn("chunk ext: %s", chunk_size_str)
            if chunk_size == 0:
                # Last chunk: drain (and log) any trailing entity headers.
                while True:
                    line = self.rfile.readline(65537)
                    if line == "\r\n":
                        break
                    else:
                        xlog.warn("entity header:%s", line)
                break
            payload += self.rfile.read(chunk_size)
            get_crlf(self.rfile)

    gae_handler.handler(self.command, self.path, request_headers, payload, self.wfile)
def _request(sock, headers, payload, bufsize=8192):
    """Send a POST /_gh/ request over an established socket and return the
    parsed HTTPResponse, or None when the status line is unreadable.

    `payload` may be a byte string (sent in <=64KB slices) or a file-like
    object with read() (streamed in bufsize chunks).
    """
    head = 'POST /_gh/ HTTP/1.1\r\n'
    head += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers)
    head += '\r\n'

    if isinstance(payload, bytes):
        sock.send(head.encode())
        total = len(payload)
        sent = 0
        while sent < total:
            # send() may accept fewer bytes than offered; loop on its return.
            sent += sock.send(payload[sent:sent + min(total - sent, 65535)])
    elif hasattr(payload, 'read'):
        sock.send(head)
        while True:
            block = payload.read(bufsize)
            if not block:
                break
            sock.send(block)
    else:
        raise TypeError('_request(payload) must be a string or buffer, not %r' % type(payload))

    response = httplib.HTTPResponse(sock, buffering=True)
    try:
        saved_timeout = sock.gettimeout()
        # Give the backend plenty of time to produce the status line.
        sock.settimeout(100)
        response.begin()
        sock.settimeout(saved_timeout)
    except httplib.BadStatusLine as e:
        #logging.warn("_request bad status line:%r", e)
        response.close()
        response = None
    except Exception as e:
        xlog.warn("_request:%r", e)
    return response
def simple_check_worker():
    """One-shot connectivity check: HEAD www.baidu.com and report the result.

    True when any status came back, False on exception. Maintains the
    checking_num in-flight counter and the SOCKS patch around the probe.
    """
    global checking_lock, checking_num, network_stat, last_check_time

    started = time.time()
    if config.PROXY_ENABLE:
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()

    try:
        conn = httplib.HTTPConnection("www.baidu.com", 80, timeout=3)
        conn.request("HEAD", "/", headers={
            "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-encoding": "gzip, deflate, sdch",
            "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
            "connection": "keep-alive",
        })
        response = conn.getresponse()
        if response.status:
            last_check_time = time.time()
            report_network_ok()
            cost_ms = 1000 * (time.time() - started)
            xlog.debug("network is ok, cost:%d ms", cost_ms)
            return True
    except Exception as e:
        xlog.warn("network fail:%r", e)
        network_stat = "Fail"
        last_check_time = time.time()
        return False
    finally:
        # Always undo the counter and the socket patch, success or not.
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            socket.socket = default_socket
            xlog.debug("restore socket")
def do_AGENT(self):
    """Collect the request body (fixed-length or chunked) then delegate the
    whole request to gae_handler.handler."""
    def get_crlf(rfile):
        # Each chunk body is followed by CRLF; mismatches are logged only.
        crlf = rfile.readline(2)
        if crlf != "\r\n":
            xlog.warn("chunk header read fail crlf")

    request_headers = dict((k.title(), v) for k, v in self.headers.items())

    payload = b''
    if 'Content-Length' in request_headers:
        try:
            payload_len = int(request_headers.get('Content-Length', 0))
            #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
            payload = self.rfile.read(payload_len)
        except NetWorkIOError as e:
            xlog.error('handle_method_urlfetch read payload failed:%s', e)
            return
    elif 'Transfer-Encoding' in request_headers:
        # chunked, used by facebook android client
        payload = ""
        while True:
            chunk_size_str = self.rfile.readline(65537)
            chunk_size_list = chunk_size_str.split(";")
            # Hex chunk size; base-0 int() with a "0x" prefix parses it.
            chunk_size = int("0x"+chunk_size_list[0], 0)
            if len(chunk_size_list) > 1 and chunk_size_list[1] != "\r\n":
                xlog.warn("chunk ext: %s", chunk_size_str)
            if chunk_size == 0:
                # Zero-size chunk ends the body; drain trailing headers.
                while True:
                    line = self.rfile.readline(65537)
                    if line == "\r\n":
                        break
                    else:
                        xlog.warn("entity header:%s", line)
                break
            payload += self.rfile.read(chunk_size)
            get_crlf(self.rfile)

    gae_handler.handler(self.command, self.path, request_headers, payload, self.wfile)
def test_appid_exist(ssl_sock, appid):
    """Check whether `appid` hosts a reachable GoAgent server via /_gh/.

    503 (out of quota) still counts as existing; 404 or a body without
    the GoAgent marker means the appid is unusable.
    """
    ssl_sock.send(('GET /_gh/ HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n' % appid).encode())

    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    response.begin()

    status = response.status
    if status == 404:
        # Nothing deployed under this appid.
        return False
    if status == 503:
        # Out of quota, but the appid itself exists.
        return True
    if status != 200:
        xlog.warn("test appid %s status:%d", appid, status)

    content = response.read()
    return "GoAgent" in content
def test_appid_exist(ssl_sock, appid):
    """Probe the root page of `appid`.appspot.com to decide whether the
    appid serves a GoAgent instance."""
    request_data = 'GET / HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n' % appid
    ssl_sock.send(request_data.encode())

    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    response.begin()

    if response.status == 404:
        # Nothing deployed under this appid.
        return False
    elif response.status == 503:
        # Out of quota: the app exists, just throttled right now.
        return True
    elif response.status != 200:
        xlog.warn("test appid %s status:%d", appid, response.status)

    body = response.read()
    if "GoAgent" in body:
        return True
    return False
def _request(sock, headers, payload, bufsize=8192):
    """POST the payload to /_gh/ on an open socket; return the HTTPResponse,
    or None when the server answered with an unparsable status line."""
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers)
    request_data += '\r\n'

    if isinstance(payload, bytes):
        sock.send(request_data.encode())
        offset = 0
        remaining = len(payload)
        while remaining > 0:
            # Partial sends are expected: track offset and remaining explicitly.
            written = sock.send(payload[offset:offset + min(remaining, 65535)])
            offset += written
            remaining -= written
    elif hasattr(payload, 'read'):
        sock.send(request_data)
        while True:
            piece = payload.read(bufsize)
            if not piece:
                break
            sock.send(piece)
    else:
        raise TypeError('_request(payload) must be a string or buffer, not %r' % type(payload))

    response = httplib.HTTPResponse(sock, buffering=True)
    try:
        prev_timeout = sock.gettimeout()
        sock.settimeout(100)
        response.begin()
        sock.settimeout(prev_timeout)
    except httplib.BadStatusLine as e:
        # Server closed or sent garbage before a status line: drop this
        # response quietly, the caller retries on None.
        response.close()
        response = None
    except Exception as e:
        xlog.warn("_request:%r", e)
    return response
def do_GET(self):
    """Web-control GET dispatcher: route /log, /status, /config, /quit etc.
    to their req_*_handler, serve wizard static files, 404 the rest."""
    path = urlparse.urlparse(self.path).path
    # High-frequency endpoints are matched first and are not logged verbosely.
    if path == "/log":
        return self.req_log_handler()
    elif path == "/status":
        return self.req_status_handler()
    else:
        xlog.debug('GAEProxy Web_control %s %s %s ', self.address_string(), self.command, self.path)

    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/ip_list":
        return self.req_ip_list_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path == "/ssl_pool":
        return self.req_ssl_pool_handler()
    elif path == "/download_cert":
        return self.req_download_cert_handler()
    elif path == "/is_ready":
        return self.req_is_ready_handler()
    elif path == "/test_ip":
        return self.req_test_ip_handler()
    elif path == "/check_ip":
        return self.req_check_ip_handler()
    elif path == "/quit":
        # Signal the main loop to stop, then acknowledge to the browser.
        connect_control.keep_running = False
        data = "Quit"
        self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
        self.wfile.write(data)
        #sys.exit(0)
        #quit()
        #os._exit(0)
        return
    elif path.startswith("/wizard/"):
        # Static wizard assets are served from web_ui_path.
        file_path = os.path.abspath(os.path.join(web_ui_path, '/'.join(path.split('/')[1:])))
        if not os.path.isfile(file_path):
            self.wfile.write(b'HTTP/1.1 404 Not Found\r\n\r\n')
            xlog.warn('%s %s %s wizard file %s not found', self.address_string(), self.command, self.path, file_path)
            return
        if file_path.endswith('.html'):
            mimetype = 'text/html'
        elif file_path.endswith('.png'):
            mimetype = 'image/png'
        elif file_path.endswith('.jpg') or file_path.endswith('.jpeg'):
            mimetype = 'image/jpeg'
        else:
            mimetype = 'application/octet-stream'
        self.send_file(file_path, mimetype)
        return
    else:
        xlog.warn('Control Req %s %s %s ', self.address_string(), self.command, self.path)

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path )
        return

    filename = os.path.normpath('./' + path)
    # Absolute-URL request: the browser treated this port as a proxy.
    if self.path.startswith(('http://', 'https://')):
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        data += b'\r\n'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
    elif os.path.isfile(filename):
        if filename.endswith('.pac'):
            mimetype = 'text/plain'
        else:
            mimetype = 'application/octet-stream'
        # NOTE(review): the actual send is commented out, so this branch
        # computes a mimetype and then sends nothing — confirm intent.
        #self.send_file(filename, mimetype)
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def create_connection(self, host="", port=443, sock_life=5):
    """Return a fresh TCP socket to a Google IP on port 443, reusing cached
    connections younger than sock_life seconds when possible.

    Connections are created by background worker threads that push results
    into tcp_connection_cache; this call waits (bounded by max_timeout) for
    one to appear. Returns None on failure.
    """
    if port != 443:
        xlog.warn("forward port %d not supported.", port)
        return None

    def _create_connection(ip_port, delay=0):
        # Worker: open one TCP connection and deposit it into the cache.
        time.sleep(delay)
        ip = ip_port[0]
        sock = None
        # start connection time record
        start_time = time.time()
        conn_time = 0
        connect_control.start_connect_register(high_prior=True)
        try:
            # create a ipv4/ipv6 socket object
            if config.PROXY_ENABLE:
                sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            else:
                sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            # set reuseaddr option to avoid 10048 socket error
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # resize socket recv buffer 8K->32K to improve browser releated application performance
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
            # disable negal algorithm to send http request quickly.
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
            # set a short timeout to trigger timeout retry more quickly.
            sock.settimeout(self.timeout)
            # TCP connect
            sock.connect(ip_port)
            # record TCP connection time
            conn_time = time.time() - start_time
            xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)
            google_ip.update_ip(ip, conn_time * 2000)
            #logging.info("create_tcp update ip:%s time:%d", ip, conn_time * 2000)
            # put ssl socket object to output queobj
            #sock.ip = ip
            self.tcp_connection_cache.put((time.time(), sock))
        except Exception as e:
            conn_time = int((time.time() - start_time) * 1000)
            xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
            google_ip.report_connect_fail(ip)
            #logging.info("create_tcp report fail ip:%s", ip)
            if sock:
                sock.close()
        finally:
            # Always release the worker slot, success or failure.
            self.thread_num_lock.acquire()
            self.thread_num -= 1
            self.thread_num_lock.release()
            connect_control.end_connect_register(high_prior=True)

    # Uploads to appengine need a connection fresher than any cached one,
    # so the cache is only consulted for other hosts.
    if host != "appengine.google.com":
        while True:
            try:
                ctime, sock = self.tcp_connection_cache.get_nowait()
                if time.time() - ctime < sock_life:
                    return sock
                else:
                    # Too old to trust; discard and keep draining.
                    sock.close()
                    continue
            except Queue.Empty:
                break

    start_time = time.time()
    while time.time() - start_time < self.max_timeout:
        # Spawn another worker while below the thread ceiling.
        if self.thread_num < self.max_thread_num:
            if host == "appengine.google.com":
                ip = google_ip.get_host_ip("*.google.com")
            else:
                ip = google_ip.get_gws_ip()
            if not ip:
                xlog.error("no gws ip.")
                return
            addr = (ip, port)
            self.thread_num_lock.acquire()
            self.thread_num += 1
            self.thread_num_lock.release()
            p = threading.Thread(target=_create_connection, args=(addr, ))
            p.start()
        # NOTE(review): reconstructed from a flattened source — this wait is
        # placed at loop level (not inside the `if`) so the loop blocks on the
        # cache instead of spinning; confirm against the original layout.
        try:
            ctime, sock = self.tcp_connection_cache.get(timeout=0.2)
            return sock
        except:
            continue
    xlog.warning('create tcp connection fail.')
def handler(method, host, url, headers, body, wfile):
    """Fetch `url` directly over an ssl connection (no GAE relay) and stream
    the response back to the browser through `wfile`.

    NOTE(review): this chunk is truncated — the outer `try:` below has no
    matching except/finally in view, and the non-chunked download path is
    missing. Only the visible portion is documented here.
    """
    time_request = time.time()

    # hop-by-hop header must not be forwarded.
    if "Connection" in headers and headers["Connection"] == "close":
        del headers["Connection"]

    errors = []
    response = None
    # Retry loop: keep fetching until a usable response or a 30 s budget is spent.
    while True:
        if time.time() - time_request > 30:
            return return_fail_message(wfile)

        try:
            response = fetch(method, host, url, headers, body)
            if response:
                if response.status > 400:
                    # An error status from a non-Google frontend means the IP
                    # is not actually a GAE-capable endpoint: drop it and retry.
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s status:%d", response.ssl_sock.ip, server_type, response.status)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                break
        except OpenSSL.SSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)

    try:
        send_to_browser = True
        try:
            # Relay status line and headers; a failure here only disables
            # forwarding (we still drain the response to keep the pool sane).
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            wait_time = time.time()-time_request
            xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s%s", wait_time, e, host, url)

        # Bodyless responses: log, recycle the ssl connection, done.
        if method == 'HEAD' or response.status in (204, 304):
            xlog.info("DIRECT t:%d %d %s %s", (time.time()-time_request)*1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return

        if 'Transfer-Encoding' in response_headers:
            # Re-chunk the body for the browser as we read it.
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead, e:
                    data = e.partial
                except Exception as e:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
                    xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                    response.close()
                    return
                if send_to_browser:
                    try:
                        if not data:
                            # Terminating chunk.
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                else:
                    if not data:
                        break
def do_CONNECT_AGENT(self):
    """deploy fake cert to client"""
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the client the tunnel is up, then MITM it with our own cert.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Client rejected the wildcard cert: pre-generate a full-domain cert
        # for the next attempt, then give up on this request.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Swap the plain connection for the TLS one, keeping the originals so the
    # finally block below can shut the real socket down.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # Read and parse the inner (decrypted) HTTP request.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            xlog.warn("read request line len:%d", len(self.raw_requestline))
            return
        if not self.raw_requestline:
            xlog.warn("read request line empty")
            return
        if not self.parse_request():
            xlog.warn("parse request fail:%s", self.raw_requestline)
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    if self.path == "https://www.twitter.com/xxnet":
        # for web_ui status page
        # auto detect browser proxy setting is work
        xlog.debug("CONNECT %s %s", self.command, self.path)
        return self.wfile.write(self.self_check_response_data)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)
    if self.command not in self.gae_support_methods:
        # Methods GAE cannot relay: Google-family hosts are moved from the
        # GAE table to the DIRECT table and redirected; others get a 404.
        if host.endswith(".google.com") or host.endswith(config.HOSTS_DIRECT_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_DIRECT:
                fwd_set = [s for s in config.HOSTS_DIRECT]
                fwd_set.append(host)
                config.HOSTS_DIRECT = tuple(fwd_set)
            xlog.warn("Method %s not support in GAE, Redirect to DIRECT for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        # Normalize the inner request path and hand it to the GAE fetcher.
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc
        self.parsed_url = urlparse.urlparse(self.path)
        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Shut the real (outer) socket down regardless of outcome.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def handler(method, url, headers, body, wfile):
    """Fetch `url` through a GAE appid and stream the result to the browser.

    Retries for up to 30 s over different IPs/appids, translating GAE-level
    statuses (app_status): 404 -> appid gone, 503 -> appid out of quota,
    403/405 -> IP unusable for GAE. A 206 reply is delegated to RangeFetch.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30: #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                # NOTE(review): reconstructed from a flattened source — this
                # server-type probe is nested under the != 200 branch here;
                # confirm against the original layout.
                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                # The appid no longer exists: drop it and switch to another.
                xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                # Appid exists but exhausted its quota: rotate to another.
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "get_timeout")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break
        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    # Partial content: hand over to the multi-threaded range fetcher.
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # GAE tucks the real HEAD length into a custom header; restore it.
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        # A GAE application-level error message replaces the body entirely.
        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # Whole body relayed: log throughput, account traffic and
                # recycle the ssl connection.
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                        (time_finished - time_request) * 1000, length, response.ssl_sock.handshake_time,
                        int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                        (time_finished - time_request) * 1000, length, response.ssl_sock.handshake_time,
                        response.status, url)

                response.ssl_sock.received_size += body_length
                google_ip.report_ip_traffic(response.ssl_sock.ip, body_length)
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                # Empty read: tolerate up to 20 s of stall before giving up.
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # Non-blocking SSL asked for a retry of the same write.
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    # Keep draining the upstream body even after the browser left.
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def fetch(self):
    """Stream a range-fetched resource to the client as a single response.

    Reads the Content-Range of the first, already-received upstream
    response (self.response), rewrites the headers for the client, then
    spawns self.threads worker greenlets (self.__fetchlet) that fetch the
    remaining ranges into a priority queue.  This loop drains the queue in
    offset order and writes the data to self.wfile.  Sets self._stopped
    when finished or aborted.
    """
    response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
    content_range = response_headers['Content-Range']
    # First chunk's "bytes <start>-<end>/<total>".
    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
    if start == 0:
        # Whole resource requested: present it as a normal 200 with full length.
        response_headers['Content-Length'] = str(length)
        del response_headers['Content-Range']
    else:
        # NOTE(review): Content-Length spans start..length-1, but the
        # Content-Range keeps the first chunk's `end` — looks inconsistent
        # for start > 0; confirm against the client's range expectations.
        response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length)
        response_headers['Content-Length'] = str(length - start)
    xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end)
    try:
        # Hand-written status line + headers; skip hop-by-hop/internal ones.
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        for key in response_headers:
            if key == 'Transfer-Encoding':
                continue
            if key == 'X-Head-Content-Length':
                continue
            if key in skip_headers:
                continue
            value = response_headers[key]
            #logging.debug("Head %s: %s", key.title(), value)
            send_header(self.wfile, key, value)
        self.wfile.write("\r\n")
    except Exception as e:
        # Client went away before we even sent headers: stop the workers.
        self._stopped = True
        xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
        return
    # data_queue: (begin_offset, data) results; range_queue: pending
    # (begin, end, response-or-None) work items for the fetch workers.
    data_queue = Queue.PriorityQueue()
    range_queue = Queue.PriorityQueue()
    # The first range is already fetched — pass its response through.
    range_queue.put((start, end, self.response))
    self.expect_begin = start
    for begin in range(end + 1, length, self.maxsize):
        range_queue.put((begin, min(begin + self.maxsize - 1, length - 1), None))
    for i in xrange(0, self.threads):
        # Stagger worker start-up proportionally to their range offset.
        range_delay_size = i * self.maxsize
        spawn_later(float(range_delay_size) / self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size)
    # peek() lets us inspect the smallest offset without consuming it;
    # fall back to get()+put() when the queue type lacks it.
    has_peek = hasattr(data_queue, 'peek')
    peek_timeout = 120
    # NOTE(review): loop bound is `length - 1`, so the final byte's
    # iteration exits one position early — confirm this off-by-one is
    # intended/tolerated by callers.
    while self.expect_begin < length - 1:
        try:
            if has_peek:
                begin, data = data_queue.peek(timeout=peek_timeout)
                if self.expect_begin == begin:
                    # In-order chunk: consume it and fall through to write.
                    data_queue.get()
                elif self.expect_begin < begin:
                    # Next chunk not ready yet; wait for a worker.
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
            else:
                begin, data = data_queue.get(timeout=peek_timeout)
                if self.expect_begin == begin:
                    pass
                else:
                    if self.expect_begin < begin:
                        # Out-of-order chunk: push it back and wait.
                        data_queue.put((begin, data))
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                        break
        except Queue.Empty:
            xlog.error('data_queue peek timeout, break')
            break
        try:
            ret = self.wfile.write(data)
            # Non-blocking SSL socket may ask for a retry instead of writing.
            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                ret = self.wfile.write(data)
                xlog.debug("send to browser wfile.write ret:%d", ret)
            self.expect_begin += len(data)
            del data  # release the chunk promptly to cap memory use
        except Exception as e:
            xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
            break
    # Signal the worker greenlets to stop.
    self._stopped = True
def fetch(method, url, headers, body):
    """Send one request through the GAE urlfetch gateway and decode the reply.

    The outgoing HTTP request line + headers are deflate-compressed and
    prefixed with a big-endian signed-short length, then posted via
    request().  On success the wrapped response's real status line and
    headers are unpacked from the leading length-prefixed, deflated
    header block.  On gateway failure (app_status != 200) or a truncated
    stream, the response is returned with app_status set accordingly and
    its read() rebound to an explanatory in-memory body.
    """
    if isinstance(body, basestring) and body:
        # Compress the body when it is small enough and not already encoded.
        if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers:
            compressed = deflate(body)
            if len(compressed) < len(body):
                body = compressed
                headers['Content-Encoding'] = 'deflate'
        if len(body) > 10 * 1024 * 1024:
            xlog.warn("body len:%d %s %s", len(body), method, url)
        headers['Content-Length'] = str(len(body))

    # GAE donot allow set `Host` header
    headers.pop('Host', None)

    # Per-request urlfetch options, forwarded as X-URLFETCH-* headers.
    kwargs = {}
    if config.GAE_PASSWORD:
        kwargs['password'] = config.GAE_PASSWORD
    kwargs['maxsize'] = config.AUTORANGE_MAXSIZE
    kwargs['timeout'] = '19'

    parts = ['%s %s HTTP/1.1\r\n' % (method, url)]
    parts.extend('%s: %s\r\n' % (k, v) for k, v in headers.items() if k not in skip_headers)
    parts.extend('X-URLFETCH-%s: %s\r\n' % (k, v) for k, v in kwargs.items() if v)
    payload = deflate(''.join(parts))

    # Wire format: 2-byte big-endian length, deflated header block, raw body.
    body = '%s%s%s' % (struct.pack('!h', len(payload)), payload, body)
    request_headers = {'Content-Length': str(len(body))}

    response = request(request_headers, body)
    response.app_msg = ''
    response.app_status = response.status
    if response.app_status != 200:
        return response

    def _short_read(status, prefix, chunk):
        # Mark the gateway failure and swap in an explanatory body.
        response.app_status = status
        response.fp = io.BytesIO(prefix + chunk)
        response.read = response.fp.read
        return response

    chunk = response.read(2)
    if len(chunk) < 2:
        xlog.warn("fetch too short lead byte len:%d %s", len(chunk), url)
        return _short_read(502, b'connection aborted. too short lead byte data=', chunk)

    headers_length, = struct.unpack('!h', chunk)
    chunk = response.read(headers_length)
    if len(chunk) < headers_length:
        xlog.warn("fetch too short header need:%d get:%d %s", headers_length, len(chunk), url)
        return _short_read(509, b'connection aborted. too short headers data=', chunk)

    response.ssl_sock.received_size += headers_length

    # Recover the real status line and header block of the fetched URL.
    raw_response_line, headers_data = inflate(chunk).split('\r\n', 1)
    _, response.status, response.reason = raw_response_line.split(None, 2)
    response.status = int(response.status)
    response.reason = response.reason.strip()
    response.msg = httplib.HTTPMessage(io.BytesIO(headers_data))
    response.app_msg = response.msg.fp.read()
    return response
def do_CONNECT_DIRECT(self):
    """Handle CONNECT by man-in-the-middling the TLS session.

    Deploys a locally-generated fake certificate for the target host to
    the client, re-reads the plaintext request from inside the TLS
    tunnel, and forwards it to direct_handler.  Only port 443 is
    supported.
    """
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Tell the client the tunnel is up, then immediately start TLS on it.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake failed with the per-host cert: pre-generate a
        # full-domain cert so the client's retry can succeed.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return
    # Swap the raw socket/files for their TLS-wrapped equivalents,
    # keeping the originals for teardown in the finally block below.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)
    try:
        # Read the tunneled plaintext request line (cap at 64 KiB + 1).
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)  # Request-URI Too Long
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise
    # Rebuild an absolute URL from the origin-form path inside the tunnel.
    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)
    xlog.debug('GAE CONNECT Direct %s %s', self.command, self.path)
    try:
        # NOTE(review): after the https:// rewrite above, self.path no
        # longer starts with '/', so this http:// branch looks
        # unreachable — confirm whether it is dead code.
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc
        self.parsed_url = urlparse.urlparse(self.path)
        # parsed_url[2] is the path, parsed_url[4] the query string.
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]
        request_headers = dict((k.title(), v) for k, v in self.headers.items())
        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return
        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always shut down the original (pre-TLS) client socket.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None