def remove_ip_process(self):
    """Worker loop that finalizes removal of ips queued on to_remove_ip_list.

    Each candidate is re-tested first: if it still serves appspot it is
    restored via add_ip instead of removed. If the network itself looks
    down, the candidate is requeued and the worker exits (the failure may
    not be the ip's fault). Always decrements remove_ip_thread_num on exit.
    """
    try:
        while connect_control.keep_running:
            try:
                ip_str = self.to_remove_ip_list.get_nowait()
            except:
                # queue drained: this worker is done
                break

            result = check_ip.test(ip_str)
            if result and result.appspot_ok:
                # ip recovered: put it back into the pool
                self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip_str)
                continue

            if not check_ip.network_is_ok():
                # can't trust the failed probe while offline; requeue and stop
                self.to_remove_ip_list.put(ip_str)
                xlog.warn("network is unreachable. check your network connection.")
                return

            xlog.info("real remove ip:%s ", ip_str)
            self.iplist_need_save = 1
    finally:
        self.remove_ip_thread_num_lock.acquire()
        self.remove_ip_thread_num -= 1
        self.remove_ip_thread_num_lock.release()
def do_POST(self):
    """Handle a POST for the local web-control UI.

    Refuses requests whose Referer is not local (127.0.0.1/localhost),
    parses the request body into self.postvars, then dispatches on the
    URL path to the matching req_*_handler. Unknown paths get a 404.
    """
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        # BUGFIX: was `netloc.startswitch("localhost")` (typo). The
        # AttributeError it raised was swallowed by the bare except below,
        # so the referer check was silently skipped for any referer that
        # did not start with 127.0.0.1.
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)

    # Parse the body into self.postvars; fall back to {} on any parse error.
    try:
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def request(headers=None, payload=None):
    """Send a GAE request over a pooled SSL connection, retrying up to 3 times.

    headers: optional dict of request headers; the Host header is filled in
             from the connection's appid/host.
    payload: optional request body passed through to _request.
    Returns the response object (with .ssl_sock attached) on success.
    Raises GAE_Exception(1) when no appid is available, GAE_Exception(2)
    after exhausting all retries.

    BUGFIX: `headers` previously defaulted to a shared mutable dict `{}`;
    since the function writes headers['Host'], the default object leaked
    state between calls. Use the None-sentinel idiom instead.
    """
    if headers is None:
        headers = {}
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                # fresh connection: bind it to an appid
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                # reused connection: keep its existing host binding
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()

    raise GAE_Exception(2, "try max times")
def try_sort_gws_ip(self, force=False):
    """Re-sort gws_ip_list by handshake time plus a failure penalty.

    Throttled to at most once per 10 seconds unless force=True. Also
    refreshes self.good_ip_num (count of gws ips with zero fail_times).
    """
    if time.time() - self.last_sort_time_for_gws < 10 and not force:
        return

    self.ip_lock.acquire()
    self.last_sort_time_for_gws = time.time()
    try:
        self.good_ip_num = 0
        ip_rate = {}
        for ip_str in self.ip_dict:
            if 'gws' not in self.ip_dict[ip_str]['server']:
                continue
            # each recorded failure adds a 1000ms penalty to the ranking
            ip_rate[ip_str] = self.ip_dict[ip_str]['handshake_time'] + (self.ip_dict[ip_str]['fail_times'] * 1000)
            if self.ip_dict[ip_str]['fail_times'] == 0:
                self.good_ip_num += 1

        # ascending by rate: best (fastest, least-failing) ips first
        ip_time = sorted(ip_rate.items(), key=operator.itemgetter(1))
        self.gws_ip_list = [ip_str for ip_str, rate in ip_time]
    except Exception as e:
        xlog.error("try_sort_ip_by_handshake_time:%s", e)
    finally:
        self.ip_lock.release()

    time_cost = ((time.time() - self.last_sort_time_for_gws) * 1000)
    if time_cost > 30:
        xlog.debug("sort ip time:%dms", time_cost)  # 5ms for 1000 ip. 70~150ms for 30000 ip.

    self.adjust_scan_thread_num()
def scan_all_exist_ip(self):
    """Re-scan every known ip with a temporary pool of 10 worker threads.

    Background scanning is paused (max_scan_ip_thread_num forced to 0)
    for the duration and restored afterwards; clears scan_all_ip_thread
    on completion so a new full scan can be started.
    """
    # pause normal background scanning while the full scan runs
    max_scan_ip_thread_num = self.max_scan_ip_thread_num
    self.max_scan_ip_thread_num = 0
    self.adjust_scan_thread_num()

    for ip in self.ip_dict:
        self.scan_exist_ip_queue.put(ip)
    xlog.debug("start scan all exist ip, num:%d", self.scan_exist_ip_queue.qsize())

    self.keep_scan_all_exist_ip = True
    scan_threads = []
    for i in range(0, 10):
        th = threading.Thread(target=self.scan_exist_ip_worker, )
        th.start()
        scan_threads.append(th)

    # wait for all workers to drain the queue
    for th in scan_threads:
        th.join()

    self.try_sort_gws_ip()
    xlog.debug("finished scan all exist ip")

    # restore normal background scanning
    self.max_scan_ip_thread_num = max_scan_ip_thread_num
    self.adjust_scan_thread_num()
    self.scan_all_ip_thread = None
def check_ip_process(self):
    """Worker loop: re-test ips queued on to_check_ip_queue.

    Each item is (ip, test_time); testing is delayed until test_time.
    When the network is known to be down, the ip's failure counter is
    reset instead of testing (the failure wasn't the ip's fault).
    """
    while connect_control.keep_running:
        try:
            ip, test_time = self.to_check_ip_queue.get()
        except:
            continue

        # honor the scheduled re-test time
        time_wait = test_time - time.time()
        if time_wait > 0:
            time.sleep(time_wait)

        if check_ip.network_stat == "Fail":
            # network down: forgive the ip's recorded failures
            try:
                if self.ip_dict[ip]['fail_times']:
                    self.ip_dict[ip]['fail_times'] = 0
                    self.good_ip_num += 1
            except:
                pass
            continue

        result = check_ip.test_gae_ip(ip)
        if result:
            # ip works again: put it back as a gws server
            self.add_ip(ip, result.handshake_time, result.domain, "gws")
            xlog.debug("restore ip:%s", ip)
            continue

        xlog.debug("ip:%s real fail", ip)
def get_host_ip(self, host):
    """Return an ip whose recorded domain matches `host`, or None.

    Skips ips handed out within the last 10 seconds or that failed within
    the last 300 seconds; stamps the chosen ip's get_time under ip_lock.
    """
    self.try_sort_gws_ip()

    self.ip_lock.acquire()
    try:
        ip_num = len(self.ip_dict)
        if ip_num == 0:
            #logging.warning("no gws ip")
            # NOTE(review): sleeps while still holding ip_lock — confirm intended
            time.sleep(1)
            return None

        for ip_str in self.ip_dict:
            domain = self.ip_dict[ip_str]["domain"]
            if domain != host:
                continue

            get_time = self.ip_dict[ip_str]["get_time"]
            if time.time() - get_time < 10:
                # handed out too recently
                continue
            handshake_time = self.ip_dict[ip_str]["handshake_time"]
            fail_time = self.ip_dict[ip_str]["fail_time"]
            if time.time() - fail_time < 300:
                # failed recently, give it time to recover
                continue

            xlog.debug("get host:%s ip:%s t:%d", host, ip_str, handshake_time)
            self.append_ip_history(ip_str, "get")
            self.ip_dict[ip_str]['get_time'] = time.time()
            return ip_str
    except Exception as e:
        xlog.error("get_gws_ip fail:%s", e)
        traceback.print_exc()
    finally:
        self.ip_lock.release()
def remove_ip_process(self):
    """Drain the remove queue, re-testing each candidate before final removal.

    A candidate that still serves appspot is restored to the pool; if the
    network looks down the candidate is requeued and the worker stops.
    The worker-thread counter is decremented on every exit path.
    """
    try:
        while connect_control.keep_running:
            try:
                ip = self.to_remove_ip_list.get_nowait()
            except:
                break

            probe = check_ip.test(ip)
            if probe and probe.appspot_ok:
                # still usable: put it back into the active pool
                self.add_ip(ip, probe.handshake_time, probe.domain, probe.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip)
            elif not check_ip.network_is_ok():
                # offline: the probe result is meaningless; requeue and stop
                self.to_remove_ip_list.put(ip)
                xlog.warn("network is unreachable. check your network connection.")
                return
            else:
                xlog.info("real remove ip:%s ", ip)
                self.iplist_need_save = 1
    finally:
        with self.remove_ip_thread_num_lock:
            self.remove_ip_thread_num -= 1
def try_sort_gws_ip(self, force=False):
    """Order gws ips by handshake time plus a 1000ms-per-failure penalty.

    Runs at most once per 10 seconds unless force=True; also recounts
    good_ip_num (gws ips with no recorded failures).
    """
    if time.time() - self.last_sort_time_for_gws < 10 and not force:
        return

    self.ip_lock.acquire()
    self.last_sort_time_for_gws = time.time()
    try:
        self.good_ip_num = 0
        rating = {}
        for ip, info in self.ip_dict.items():
            if 'gws' not in info['server']:
                continue
            rating[ip] = info['handshake_time'] + info['fail_times'] * 1000
            if info['fail_times'] == 0:
                self.good_ip_num += 1
        # best (lowest-rated) ips first; stable for equal ratings
        self.gws_ip_list = sorted(rating, key=rating.get)
    except Exception as e:
        xlog.error("try_sort_ip_by_handshake_time:%s", e)
    finally:
        self.ip_lock.release()

    elapsed_ms = (time.time() - self.last_sort_time_for_gws) * 1000
    if elapsed_ms > 30:
        # 5ms for 1000 ip. 70~150ms for 30000 ip.
        xlog.debug("sort ip time:%dms", elapsed_ms)

    self.adjust_scan_thread_num()
def report_connect_fail(self, ip_str, force_remove=False):
    """Record a connect failure for ip_str; remove it after 50 failures
    (or immediately when force_remove=True).

    Non-forced removals are queued on to_remove_ip_list for a background
    re-check before becoming final. Failures are ignored while the system
    network is down, and debounced within a 1 second window. May trigger
    a search for more ips when the pool runs low.
    """
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if not ip_str in self.ip_dict:
            return

        self.ip_dict[ip_str]["links"] -= 1

        # ignore if system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                # connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            # debounce: repeated failures within 1s count once
            xlog.debug("fail time too near")
            return

        # increase handshake_time to make it can be used in lower probability
        self.ip_dict[ip_str]["handshake_time"] += 300

        if self.ip_dict[ip_str]["fail_times"] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]["fail_times"] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]["fail_times"] >= 50:
            property = self.ip_dict[ip_str]
            server = property["server"]
            del self.ip_dict[ip_str]

            if "gws" in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                # tentative removal: background thread re-checks the ip
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info("remove ip tmp:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))
            else:
                xlog.info("remove ip:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))

            # keep the good-ip counter consistent after removal
            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()
def report_connect_fail(self, ip, force_remove=False):
    """Record a connect failure for ip and schedule a re-check in 10s.

    force_remove=True deletes the ip immediately. Otherwise the failure
    is ignored while the network is down; after >10 consecutive global
    failures the network state is reset to "unknown" and a network check
    is triggered instead of blaming the ip. Debounced within 1 second.
    """
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if not ip in self.ip_dict:
            xlog.debug("report_connect_fail %s not exist", ip)
            return

        if force_remove:
            if self.ip_dict[ip]['fail_times'] == 0:
                self.good_ip_num -= 1
            del self.ip_dict[ip]

            if ip in self.gws_ip_list:
                self.gws_ip_list.remove(ip)

            xlog.info("remove ip:%s left amount:%d gws_num:%d", ip, len(self.ip_dict), len(self.gws_ip_list))
            return

        self.ip_dict[ip]['links'] -= 1

        # ignore if system network is disconnected.
        if check_ip.network_stat == "Fail":
            xlog.debug("report_connect_fail network fail")
            return

        check_ip.continue_fail_count += 1
        if check_ip.continue_fail_count > 10:
            # too many failures in a row: suspect the network, not the ip
            check_ip.network_stat = "unknown"
            xlog.debug("report_connect_fail continue_fail_count:%d", check_ip.continue_fail_count)
            check_ip.triger_check_network()
            return

        fail_time = self.ip_dict[ip]["fail_time"]
        if time_now - fail_time < 1:
            # debounce: repeated failures within 1s count once
            xlog.debug("fail time too near %s", ip)
            return

        if self.ip_dict[ip]['fail_times'] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip]['fail_times'] += 1
        self.append_ip_history(ip, "fail")
        self.ip_dict[ip]["fail_time"] = time_now

        check_ip.triger_check_network()
        # queue the ip for re-testing 10 seconds from now
        self.to_check_ip_queue.put((ip, time_now + 10))
        xlog.debug("report_connect_fail:%s", ip)
    except Exception as e:
        xlog.exception("report_connect_fail err:%s", e)
    finally:
        self.iplist_need_save = 1
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()
def report_connect_fail(self, ip_str, force_remove=False):
    """Record a connect failure for ip_str; remove it after 50 failures
    (or immediately when force_remove=True).

    Variant note: the handshake_time penalty is disabled here (commented
    out), unlike the sibling implementation that adds 300ms per failure.
    Non-forced removals are queued for a background re-check. Failures
    are ignored while the network is down, and debounced within 1 second.
    """
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if not ip_str in self.ip_dict:
            return

        self.ip_dict[ip_str]['links'] -= 1

        # ignore if system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                #connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            # debounce: repeated failures within 1s count once
            xlog.debug("fail time too near")
            return

        # increase handshake_time to make it can be used in lower probability
        #self.ip_dict[ip_str]['handshake_time'] += 300

        if self.ip_dict[ip_str]['fail_times'] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]['fail_times'] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]['fail_times'] >= 50:
            property = self.ip_dict[ip_str]
            server = property['server']
            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                # tentative removal: background thread re-checks the ip
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info("remove ip tmp:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))
            else:
                xlog.info("remove ip:%s left amount:%d gws_num:%d", ip_str, len(self.ip_dict), len(self.gws_ip_list))

            # keep the good-ip counter consistent after removal
            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()
def get_new_ssl(self):
    """Spawn more connect attempts and block for one fresh ssl socket.

    Returns the ssl socket, or None when no connection arrives within
    max_timeout.
    """
    self.create_more_connection()
    item = self.new_conn_pool.get(True, self.max_timeout)
    if not item:
        xlog.debug("get_new_ssl timeout fail.")
        return None
    _handshake_time, ssl_sock = item
    return ssl_sock
def report_connect_closed(self, ip_str, reason=""):
    """Log a closed connection and drop one link refcount for ip_str."""
    xlog.debug("%s close:%s", ip_str, reason)
    self.ip_lock.acquire()
    try:
        record = self.ip_dict.get(ip_str)
        if record is not None:
            record['links'] -= 1
            self.append_ip_history(ip_str, "C[%s]" % reason)
    except Exception as e:
        xlog.error("report_connect_closed err:%s", e)
    finally:
        self.ip_lock.release()
def report_connect_closed(self, ip_str, reason=""):
    """Log a closed connection and decrement ip_str's link refcount.

    No-op (beyond the log line) when ip_str is no longer tracked.
    """
    xlog.debug("%s close:%s", ip_str, reason)
    self.ip_lock.acquire()
    try:
        if ip_str in self.ip_dict:
            self.ip_dict[ip_str]['links'] -= 1
            self.append_ip_history(ip_str, "C[%s]" % reason)
    except Exception as e:
        xlog.error("report_connect_closed err:%s", e)
    finally:
        self.ip_lock.release()
def get_gws_ip(self):
    """Round-robin pick the next usable gws ip; None if the list is empty.

    Skips ips that were handed out too recently, failed recently (with a
    longer back-off for ips that haven't succeeded in 5 minutes), or are
    already at max_links_per_ip. Increments the chosen ip's link count
    and stamps its get_time under ip_lock.
    """
    self.try_sort_gws_ip()

    self.ip_lock.acquire()
    try:
        ip_num = len(self.gws_ip_list)
        if ip_num == 0:
            #logging.warning("no gws ip")
            #time.sleep(10)
            return None

        for i in range(ip_num):
            time_now = time.time()
            if self.gws_ip_pointer >= ip_num:
                # wrapped past the end: rate-limit resets to once per second
                if time_now - self.gws_ip_pointer_reset_time < 1:
                    time.sleep(1)
                    continue
                else:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time_now
            elif self.gws_ip_pointer > 0 and time_now - self.gws_ip_pointer_reset_time > 3:
                # stale pointer: restart from the best-ranked ip
                self.gws_ip_pointer = 0
                self.gws_ip_pointer_reset_time = time_now

            ip = self.gws_ip_list[self.gws_ip_pointer]
            get_time = self.ip_dict[ip]["get_time"]
            if time_now - get_time < self.ip_connect_interval:
                # handed out too recently
                self.gws_ip_pointer += 1
                continue

            if time_now - self.ip_dict[ip]['success_time'] > 300:  # 5 min
                fail_connect_interval = 1800  # 30 min
            else:
                fail_connect_interval = 120  # 2 min
            fail_time = self.ip_dict[ip]["fail_time"]
            if time_now - fail_time < fail_connect_interval:
                # still in its failure back-off window
                self.gws_ip_pointer += 1
                continue

            if self.ip_dict[ip]['links'] >= config.max_links_per_ip:
                # at capacity
                self.gws_ip_pointer += 1
                continue

            handshake_time = self.ip_dict[ip]["handshake_time"]
            xlog.debug("get ip:%s t:%d", ip, handshake_time)
            self.append_ip_history(ip, "get")
            self.ip_dict[ip]['get_time'] = time_now
            self.ip_dict[ip]['links'] += 1
            self.gws_ip_pointer += 1
            return ip
    except Exception as e:
        xlog.exception("get_gws_ip fail:%r", e)
    finally:
        self.ip_lock.release()
def do_METHOD(self):
    """Dispatch an incoming HTTP request.

    Routes: local web-control UI requests to ControlHandler; localhost
    targets to forward_local; a fixed twitter probe URL to the self-check
    response; configured GAE hosts to do_AGENT; configured forward/direct
    hosts to an https 301 redirect; everything else to do_AGENT.
    """
    touch_active()

    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
        # request addressed to our own control port: hand to the web UI
        controler = web_control.ControlHandler(self.client_address, self.headers, self.command, self.path, self.rfile, self.wfile)
        if self.command == "GET":
            return controler.do_GET()
        elif self.command == "POST":
            return controler.do_POST()
        else:
            xlog.warn("method not defined: %s", self.command)
            return

    # normalize self.path to an absolute URL
    if self.path[0] == '/' and host:
        self.path = 'http://%s%s' % (host, self.path)
    elif not host and '://' in self.path:
        host = urlparse.urlparse(self.path).netloc

    if host.startswith("127.0.0.1") or host.startswith("localhost"):
        #xlog.warn("Your browser forward localhost to proxy.")
        return self.forward_local()

    if self.path == "http://www.twitter.com/xxnet":
        xlog.debug("%s %s", self.command, self.path)
        # for web_ui status page
        # auto detect browser proxy setting is work
        return self.wfile.write(self.self_check_response_data)

    self.parsed_url = urlparse.urlparse(self.path)

    if host in config.HOSTS_GAE:
        return self.do_AGENT()

    if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
        # push the client to https for forward/direct hosts
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    if host.endswith(config.HOSTS_GAE_ENDSWITH):
        return self.do_AGENT()

    if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_DIRECT_ENDSWITH):
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    return self.do_AGENT()
def head_request(self, ssl_sock):
    """Send a HEAD /_gh/ keep-alive probe on ssl_sock; True iff it returns 200.

    Binds a fresh connection to an appid first. Public "xxnet-" appids are
    never kept alive (quota limits), so they return False immediately.
    """
    if ssl_sock.host == '':
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): "xxnet-" is 6 chars but the slice is [7:], skipping the
    # first char after the dash — confirm the public appid naming scheme.
    if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            xlog.warn("head send len:%d %d", ret, len(data))

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()

        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            raise Exception("app check fail %r" % status)
        return True
    except httplib.BadStatusLine as e:
        # server closed the idle connection before answering
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.warn("%s head %s request fail:%r", ssl_sock.ip, ssl_sock.appid, e)
        return False
    finally:
        if response:
            response.close()
def ssl_closed(self, ip, reason=""):
    """Record that an ssl link to `ip` closed; decrement its link count.

    Silently ignores ips that are no longer tracked; never lets the
    counter go below zero.
    """
    #xlog.debug("%s ssl_closed:%s", ip, reason)
    self.ip_lock.acquire()
    try:
        entry = self.ip_dict.get(ip)
        if entry is not None:
            if entry['links']:
                entry['links'] -= 1
            self.append_ip_history(ip, "C[%s]" % reason)
            xlog.debug("ssl_closed %s", ip)
    except Exception as e:
        xlog.error("ssl_closed %s err:%s", ip, e)
    finally:
        self.ip_lock.release()
def _create_connection(ip_port, delay=0):
    """Open one TCP connection to ip_port and cache it with a timestamp.

    On success reports the connect time to google_ip and puts
    (time, sock) on tcp_connection_cache; on failure reports a connect
    fail and closes the socket. Always decrements the thread counter.

    NOTE(review): references `self` without taking it as a parameter —
    presumably a closure defined inside a method; confirm in the full file.
    """
    time.sleep(delay)
    ip = ip_port[0]
    sock = None
    # start connection time record
    start_time = time.time()
    conn_time = 0
    connect_control.start_connect_register(high_prior=True)
    try:
        # create a ipv4/ipv6 socket object
        if config.PROXY_ENABLE:
            sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        else:
            sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        # set reuseaddr option to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # resize socket recv buffer 8K->32K to improve browser releated application performance
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
        # disable negal algorithm to send http request quickly.
        sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
        # set a short timeout to trigger timeout retry more quickly.
        sock.settimeout(self.timeout)

        # TCP connect
        sock.connect(ip_port)

        # record TCP connection time
        conn_time = time.time() - start_time
        xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)
        google_ip.update_ip(ip, conn_time * 2000)
        #logging.info("create_tcp update ip:%s time:%d", ip, conn_time * 2000)

        # put ssl socket object to output queobj
        #sock.ip = ip
        self.tcp_connection_cache.put((time.time(), sock))
    except Exception as e:
        conn_time = int((time.time() - start_time) * 1000)
        xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
        google_ip.report_connect_fail(ip)
        #logging.info("create_tcp report fail ip:%s", ip)
        if sock:
            sock.close()
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
        connect_control.end_connect_register(high_prior=True)
def head_request(self, ssl_sock):
    """Probe ssl_sock with HEAD /_gh/; return True only on HTTP 200.

    Assigns an appid to fresh connections. Public "xxnet-" appids are not
    kept alive (quota limit) and return False without probing.
    """
    if ssl_sock.host == '':
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): "xxnet-" is 6 chars; the [7:] slice skips one extra
    # char — confirm against the public appid naming scheme.
    if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            xlog.warn("head send len:%d %d", ret, len(data))

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()

        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            raise Exception("app check fail %r" % status)
        return True
    except httplib.BadStatusLine as e:
        # server dropped the idle connection before responding
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.warn("%s head %s request fail:%r", ssl_sock.ip, ssl_sock.appid, e)
        return False
    finally:
        if response:
            response.close()
def _create_connection(ip_port, delay=0):
    """Create one TCP connection to ip_port, caching (timestamp, sock).

    Reports connect time (success) or a connect failure to google_ip;
    decrements the worker-thread counter on every exit path.

    NOTE(review): uses `self` without it being a parameter — presumably a
    closure nested inside a method; confirm in the full file.
    """
    time.sleep(delay)
    ip = ip_port[0]
    sock = None
    # start connection time record
    start_time = time.time()
    conn_time = 0
    connect_control.start_connect_register(high_prior=True)
    try:
        # create a ipv4/ipv6 socket object
        if config.PROXY_ENABLE:
            sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        else:
            sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        # set reuseaddr option to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # resize socket recv buffer 8K->32K to improve browser releated application performance
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32*1024)
        # disable negal algorithm to send http request quickly.
        sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
        # set a short timeout to trigger timeout retry more quickly.
        sock.settimeout(self.timeout)

        # TCP connect
        sock.connect(ip_port)

        # record TCP connection time
        conn_time = time.time() - start_time
        xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)
        google_ip.update_ip(ip, conn_time * 2000)
        #logging.info("create_tcp update ip:%s time:%d", ip, conn_time * 2000)

        # put ssl socket object to output queobj
        #sock.ip = ip
        self.tcp_connection_cache.put((time.time(), sock))
    except Exception as e:
        conn_time = int((time.time() - start_time) * 1000)
        xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
        google_ip.report_connect_fail(ip)
        #logging.info("create_tcp report fail ip:%s", ip)
        if sock:
            sock.close()
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
        connect_control.end_connect_register(high_prior=True)
def do_CONNECT_FWD(self):
    """socket forward for http CONNECT command

    Replies 200 to the client, opens a forward connection to the target
    (with a host-dependent freshness requirement), replays any bytes the
    client already sent, then pumps traffic both ways via forward_socket.
    """
    host, _, port = self.path.rpartition(':')
    port = int(port)
    xlog.info('FWD %s %s:%d ', self.command, host, port)
    if host == "appengine.google.com" or host == "www.google.com":
        connected_in_s = 5  # gae_proxy upload to appengine is slow, it need more 'fresh' connection.
    else:
        connected_in_s = 10  # gws connect can be used after tcp connection created 15 s
    try:
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
        # grab whatever the client sent immediately after CONNECT
        data = self.connection.recv(1024)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        return

    remote = forwork_manager.create_connection(host=host, port=port, sock_life=connected_in_s)
    if remote is None:
        self.connection.close()
        xlog.warn('FWD %s %s:%d create_connection fail', self.command, host, port)
        return

    try:
        if data:
            # replay the client's first bytes to the target
            remote.send(data)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        remote.close()
        return

    # reset timeout default to avoid long http upload failure, but it will delay timeout retry :(
    remote.settimeout(None)
    forwork_manager.forward_socket(self.connection, remote, bufsize=self.bufsize)
    xlog.debug('FWD %s %s:%d with closed', self.command, host, port)
def do_METHOD(self):
    """Route an incoming HTTP request to the right handler.

    Local control-port requests go to the web UI; localhost hosts to
    forward_local; a fixed probe URL returns the self-check payload;
    configured hosts go to GAE (do_AGENT) or get an https 301 redirect;
    anything else defaults to do_AGENT.
    """
    touch_active()

    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')
    if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
        # request addressed to our own control port: hand to the web UI
        controler = web_control.ControlHandler(self.client_address, self.headers, self.command, self.path, self.rfile, self.wfile)
        if self.command == "GET":
            return controler.do_GET()
        elif self.command == "POST":
            return controler.do_POST()
        else:
            xlog.warn("method not defined: %s", self.command)
            return

    # normalize self.path to an absolute URL
    if self.path[0] == '/' and host:
        self.path = 'http://%s%s' % (host, self.path)
    elif not host and '://' in self.path:
        host = urlparse.urlparse(self.path).netloc

    if host.startswith("127.0.0.1") or host.startswith("localhost"):
        #xlog.warn("Your browser forward localhost to proxy.")
        return self.forward_local()

    if self.path == "http://www.twitter.com/xxnet":
        xlog.debug("%s %s", self.command, self.path)
        # for web_ui status page
        # auto detect browser proxy setting is work
        return self.wfile.write(self.self_check_response_data)

    self.parsed_url = urlparse.urlparse(self.path)

    if host in config.HOSTS_GAE:
        return self.do_AGENT()

    if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
        # push the client to https for forward/direct hosts
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    if host.endswith(config.HOSTS_GAE_ENDSWITH):
        return self.do_AGENT()

    if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(config.HOSTS_DIRECT_ENDSWITH):
        return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path.replace('http://', 'https://', 1)).encode())

    return self.do_AGENT()
def do_POST(self):
    """Handle a POST for the local web-control UI.

    Rejects non-local Referers, parses the body into self.postvars and
    dispatches on the URL path; unknown paths return a 404.
    """
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        # BUGFIX: `netloc.startswitch("localhost")` (typo) raised an
        # AttributeError that the bare except swallowed, so non-local
        # referers slipped past this check unrefused.
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)

    # Parse the body into self.postvars; fall back to {} on any parse error.
    try:
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def forward_socket(self, local, remote, timeout=60, tick=2, bufsize=8192):
    """Pump bytes both ways between `local` and `remote` until one side
    disconnects or `timeout` seconds pass with no traffic.

    The idle budget (timecount) is refilled on every successful transfer
    and decremented by `tick` per select round. Both sockets are always
    closed on exit.
    """
    try:
        timecount = timeout
        while 1:
            timecount -= tick
            if timecount <= 0:
                # idle too long: give up
                break
            (ins, _, errors) = select.select([local, remote], [], [local, remote], tick)
            if errors:
                break
            if not ins:
                continue
            for sock in ins:
                data = sock.recv(bufsize)
                if not data:
                    # peer closed its end
                    if sock is remote:
                        xlog.debug("forward remote disconnected.")
                    else:
                        xlog.debug("forward local disconnected.")
                    return
                if sock is remote:
                    local.sendall(data)
                    timecount = timeout
                else:
                    remote.sendall(data)
                    timecount = timeout
    except Exception as e:
        # BUGFIX: e.args may be empty (e.g. a bare Exception()); indexing
        # e.args[0] unconditionally raised IndexError inside the handler.
        if not e.args or e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.ENOTCONN, errno.EPIPE):
            xlog.exception("forward except:%s.", e)
    finally:
        if local:
            local.close()
        if remote:
            remote.close()
def scan_all_exist_ip(self):
    """Re-scan every known ip with a temporary 10-thread worker pool.

    Background scanning is paused (max_scan_ip_thread_num forced to 0)
    while the workers drain scan_exist_ip_queue, then restored.
    """
    # pause normal background scanning while the full scan runs
    max_scan_ip_thread_num = self.max_scan_ip_thread_num
    self.max_scan_ip_thread_num = 0
    self.adjust_scan_thread_num()

    for ip in self.ip_dict:
        self.scan_exist_ip_queue.put(ip)
    xlog.debug("start scan all exist ip, num:%d", self.scan_exist_ip_queue.qsize())

    scan_threads = []
    for i in range(0, 10):
        th = threading.Thread(target=self.scan_exist_ip_worker, )
        th.start()
        scan_threads.append(th)

    # wait for all workers to finish
    for th in scan_threads:
        th.join()

    self.try_sort_gws_ip()
    xlog.debug("finished scan all exist ip")

    # restore normal background scanning
    self.max_scan_ip_thread_num = max_scan_ip_thread_num
    self.adjust_scan_thread_num()
def get_ssl_connection(self, host=''):
    """Return a usable pooled ssl connection, creating more in background.

    With `host`, pulls from that host's dedicated pool; otherwise from
    the shared gae pool. Pooled sockets idle longer than keep_alive+1s
    are closed and skipped. Falls back to waiting up to max_timeout for
    a freshly created connection; returns None on timeout.

    NOTE(review): the loops assume get_nowait() returns None when the
    pool is empty (a custom pool, not Queue.Queue which raises Empty) —
    confirm the pool implementation.
    """
    ssl_sock = None
    if host:
        if host in self.host_conn_pool:
            while True:
                ret = self.host_conn_pool[host].get_nowait()
                if ret:
                    handshake_time, ssl_sock = ret
                else:
                    ssl_sock = None
                    break

                if time.time() - ssl_sock.last_use_time < self.keep_alive + 1:
                    # fresh enough to reuse
                    xlog.debug("host_conn_pool %s get:%s handshake:%d", host, ssl_sock.ip, handshake_time)
                    break
                else:
                    # idled out: discard and try the next pooled socket
                    google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                    ssl_sock.close()
                    continue
    else:
        while True:
            ret = self.gae_conn_pool.get_nowait()
            if ret:
                handshake_time, ssl_sock = ret
            else:
                ssl_sock = None
                break

            if time.time() - ssl_sock.last_use_time < self.keep_alive + 1:
                # fresh enough to reuse
                xlog.debug("ssl_pool.get:%s handshake:%d", ssl_sock.ip, handshake_time)
                break
            else:
                # idled out: discard and try the next pooled socket
                google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                ssl_sock.close()
                continue

    # keep the pool topped up for future callers
    self.create_more_connection()

    if ssl_sock:
        return ssl_sock
    else:
        ret = self.new_conn_pool.get(True, self.max_timeout)
        if ret:
            handshake_time, ssl_sock = ret
            return ssl_sock
        else:
            xlog.debug("create ssl timeout fail.")
            return None
def network_is_ok():
    """Return cached network state, probing github.com over https if stale.

    Results are cached for check_network_interval seconds; concurrent
    callers reuse the cached value while a probe is in flight. When a
    proxy is configured, socket.socket is temporarily monkey-patched to
    the socks socket for the probe and restored afterwards.
    """
    global checking_lock, checking_num, network_ok, last_check_time, check_network_interval
    if time.time() - last_check_time < check_network_interval:
        return network_ok
    # NOTE(review): last_ok_time is read but never declared global or
    # updated here — confirm where it is maintained.
    if time.time() - last_ok_time < check_network_interval:
        return True

    if checking_num > 0:
        # another thread is already probing: reuse the last known state
        return network_ok

    if config.PROXY_ENABLE:
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    try:
        conn = httplib.HTTPSConnection("github.com", 443, timeout=30)
        header = {"user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
                  "accept": "application/json, text/javascript, */*; q=0.01",
                  "accept-encoding": "gzip, deflate, sdch",
                  "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
                  "connection": "keep-alive"
                  }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            xlog.debug("network is ok")
            network_ok = True
            last_check_time = time.time()
            return True
    except:
        pass
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            # undo the socks monkey-patch
            socket.socket = default_socket
            xlog.debug("restore socket")

    xlog.warn("network fail.")
    network_ok = False
    last_check_time = time.time()
    return False
def get_ssl_connection(self, host=''):
    """Return a usable pooled ssl connection; None after max_timeout.

    With `host`, the per-host pool is tried first; otherwise the shared
    gae pool. Sockets idle beyond keep_alive+1s are closed and skipped;
    create_more_connection keeps the pool warm, and a fresh connection is
    awaited as a fallback.

    NOTE(review): assumes get_nowait() returns None on an empty pool
    (custom pool, not Queue.Queue) — confirm the pool implementation.
    """
    ssl_sock = None
    if host:
        if host in self.host_conn_pool:
            while True:
                ret = self.host_conn_pool[host].get_nowait()
                if ret:
                    handshake_time, ssl_sock = ret
                else:
                    ssl_sock = None
                    break

                if time.time() - ssl_sock.last_use_time < self.keep_alive+1:
                    # fresh enough to reuse
                    xlog.debug("host_conn_pool %s get:%s handshake:%d", host, ssl_sock.ip, handshake_time)
                    break
                else:
                    # idled out: discard and try the next pooled socket
                    google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                    ssl_sock.close()
                    continue
    else:
        while True:
            ret = self.gae_conn_pool.get_nowait()
            if ret:
                handshake_time, ssl_sock = ret
            else:
                ssl_sock = None
                break

            if time.time() - ssl_sock.last_use_time < self.keep_alive+1:
                # fresh enough to reuse
                xlog.debug("ssl_pool.get:%s handshake:%d", ssl_sock.ip, handshake_time)
                break
            else:
                # idled out: discard and try the next pooled socket
                google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                ssl_sock.close()
                continue

    # keep the pool topped up for future callers
    self.create_more_connection()

    if ssl_sock:
        return ssl_sock
    else:
        ret = self.new_conn_pool.get(True, self.max_timeout)
        if ret:
            handshake_time, ssl_sock = ret
            return ssl_sock
        else:
            xlog.debug("create ssl timeout fail.")
            return None
def network_is_ok():
    """Return cached network state, probing github.com over https if stale.

    The result is cached for check_network_interval seconds; if another
    probe is already running, the last known state is returned. When a
    proxy is configured, socket.socket is temporarily monkey-patched to
    the socks socket for the probe and restored afterwards.
    """
    global checking_lock, checking_num, network_ok, last_check_time, check_network_interval
    if time.time() - last_check_time < check_network_interval:
        return network_ok

    if checking_num > 0:
        # another thread is already probing: reuse the last known state
        return network_ok

    if config.PROXY_ENABLE:
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    try:
        conn = httplib.HTTPSConnection("github.com", 443, timeout=30)
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-encoding": "gzip, deflate, sdch",
            "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
            "connection": "keep-alive"
        }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            xlog.debug("network is ok")
            network_ok = True
            last_check_time = time.time()
            return True
    except:
        pass
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            # undo the socks monkey-patch
            socket.socket = default_socket
            xlog.debug("restore socket")

    xlog.warn("network fail.")
    network_ok = False
    last_check_time = time.time()
    return False
def simple_check_worker():
    """One-shot connectivity probe: HEAD http://www.baidu.com/ (3s timeout).

    On success calls report_network_ok(); on failure sets network_stat to
    "Fail".  Updates last_check_time either way.  Returns True/False.
    """
    global checking_lock, checking_num, network_stat, last_check_time
    time_now = time.time()
    if config.PROXY_ENABLE:
        # route the probe through the configured proxy
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    conn = None
    try:
        conn = httplib.HTTPConnection("www.baidu.com", 80, timeout=3)
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-encoding": "gzip, deflate, sdch",
            "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
            "connection": "keep-alive"
        }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            last_check_time = time.time()
            report_network_ok()
            xlog.debug("network is ok, cost:%d ms", 1000 * (time.time() - time_now))
            return True
    except Exception as e:
        xlog.warn("network fail:%r", e)
        network_stat = "Fail"
        last_check_time = time.time()
        return False
    finally:
        if conn:
            conn.close()  # fix: release the probe connection (was leaked)
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            socket.socket = default_socket
            xlog.debug("restore socket")
def simple_check_worker():
    """Quick network probe against http://www.baidu.com/ (HEAD, 3s timeout).

    Calls report_network_ok() on success, marks network_stat "Fail" on any
    exception, and refreshes last_check_time in both cases.
    """
    global checking_lock, checking_num, network_stat, last_check_time
    time_now = time.time()
    if config.PROXY_ENABLE:
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    conn = None
    try:
        conn = httplib.HTTPConnection("www.baidu.com", 80, timeout=3)
        header = {"user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
                  "accept": "application/json, text/javascript, */*; q=0.01",
                  "accept-encoding": "gzip, deflate, sdch",
                  "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
                  "connection": "keep-alive"
                  }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        if response.status:
            last_check_time = time.time()
            report_network_ok()
            xlog.debug("network is ok, cost:%d ms", 1000*(time.time() - time_now))
            return True
    except Exception as e:
        xlog.warn("network fail:%r", e)
        network_stat = "Fail"
        last_check_time = time.time()
        return False
    finally:
        if conn:
            conn.close()  # fix: the probe connection was never closed
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            socket.socket = default_socket
            xlog.debug("restore socket")
def do_CONNECT_AGENT(self):
    """Handle CONNECT by deploying a fake cert and tunneling through GAE.

    Performs a man-in-the-middle TLS handshake with the client using a
    locally generated certificate for the target host, re-reads the real
    request from inside the tunnel, then dispatches it to do_AGENT().
    """
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # tell the client the tunnel is "established" before the MITM handshake
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # NOTE(review): regenerates a full-domain cert but still returns;
        # presumably the client's retry will then succeed — confirm.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # swap the plain socket for the TLS-wrapped one; keep originals for cleanup
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        # read the real request line from inside the tunnel
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            xlog.warn("read request line len:%d", len(self.raw_requestline))
            return
        if not self.raw_requestline:
            xlog.warn("read request line empty")
            return
        if not self.parse_request():
            xlog.warn("parse request fail:%s", self.raw_requestline)
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    if self.path == "https://www.twitter.com/xxnet":
        # for web_ui status page
        # auto detect browser proxy setting is work
        xlog.debug("CONNECT %s %s", self.command, self.path)
        return self.wfile.write(self.self_check_response_data)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)
    if self.command not in self.gae_support_methods:
        # method GAE can't forward: move the host to the DIRECT list and redirect
        if host.endswith(".google.com") or host.endswith(config.HOSTS_DIRECT_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_DIRECT:
                fwd_set = [s for s in config.HOSTS_DIRECT]
                fwd_set.append(host)
                config.HOSTS_DIRECT = tuple(fwd_set)
            xlog.warn("Method %s not support in GAE, Redirect to DIRECT for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)

        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # always tear down the hijacked raw connection
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def __init__(self, type, message):
    """Store the numeric error category and human-readable message."""
    self.type = type
    self.message = message
    xlog.debug("GAE_Exception %r %r", type, message)
def fetch(self):
    """Serve a ranged download to the client as one 200 response.

    Parses the Content-Range of the first (already-fetched) response,
    rewrites headers for the client, then spawns __fetchlet worker threads
    that push (begin, data) chunks into a priority queue.  This loop writes
    the chunks to the client strictly in offset order.
    """
    response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
    content_range = response_headers['Content-Range']
    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
    if start == 0:
        # full body from the beginning: present it as a plain 200 response
        response_headers['Content-Length'] = str(length)
        del response_headers['Content-Range']
    else:
        response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length)
        response_headers['Content-Length'] = str(length-start)

    xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end)

    try:
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        for key in response_headers:
            if key == 'Transfer-Encoding':
                continue
            if key == 'X-Head-Content-Length':
                continue
            if key in skip_headers:
                continue
            value = response_headers[key]
            #logging.debug("Head %s: %s", key.title(), value)
            send_header(self.wfile, key, value)
        self.wfile.write("\r\n")
    except Exception as e:
        self._stopped = True
        xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
        return

    data_queue = Queue.PriorityQueue()
    range_queue = Queue.PriorityQueue()
    # seed with the response we already hold, then one task per remaining range
    range_queue.put((start, end, self.response))
    self.expect_begin = start
    for begin in range(end+1, length, self.maxsize):
        range_queue.put((begin, min(begin+self.maxsize-1, length-1), None))
    for i in xrange(0, self.threads):
        # stagger worker start-up proportionally to their range offset
        range_delay_size = i * self.maxsize
        spawn_later(float(range_delay_size)/self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size)

    has_peek = hasattr(data_queue, 'peek')
    peek_timeout = 120
    while self.expect_begin < length - 1:
        try:
            if has_peek:
                # peek lets us wait for the next in-order chunk without
                # popping chunks that arrive early
                begin, data = data_queue.peek(timeout=peek_timeout)
                if self.expect_begin == begin:
                    data_queue.get()
                elif self.expect_begin < begin:
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
            else:
                begin, data = data_queue.get(timeout=peek_timeout)
                if self.expect_begin == begin:
                    pass
                elif self.expect_begin < begin:
                    # arrived early: push back and wait for the missing chunk
                    data_queue.put((begin, data))
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
        except Queue.Empty:
            xlog.error('data_queue peek timeout, break')
            break

        try:
            ret = self.wfile.write(data)
            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                # non-blocking SSL asked for a retry; write the chunk again
                xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                ret = self.wfile.write(data)
                xlog.debug("send to browser wfile.write ret:%d", ret)
            self.expect_begin += len(data)
            del data
        except Exception as e:
            xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
            break
    # signal the __fetchlet workers to stop
    self._stopped = True
def handler(method, url, headers, body, wfile):
    """Fetch `url` through GAE and stream the response to the client.

    Retries up to 30 seconds around appid/ip failures (404 appid gone,
    403/405 bad ip, 503 quota), hands 206 responses to RangeFetch, and
    otherwise relays headers and body to `wfile`, returning the connection
    for reuse on clean completion.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30: #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

            try:
                # a GAE-capable ip must present a Google front-end server header
                server_type = response.getheader('Server', "")
                if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                    xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                    google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                    response.close()
                    continue
            except Exception as e:
                errors.append(e)
                xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                continue

            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "get_timeout")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        # partial content: let RangeFetch assemble the full body
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            # GAE reports the real body length of HEAD responses out-of-band
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # client went away; keep draining the server below so the
            # connection can still be reused
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time()-time_request, e, url)

        if len(response.app_msg):
            # GAE-level error page generated by the server-side agent
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # whole body relayed: log stats and recycle the connection
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                        (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time,
                        int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                        (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time,
                        response.status, url)

                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # non-blocking SSL asked for a retry
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def _create_ssl_connection(self, ip_port):
    """Open one TCP+TLS connection to a Google (ip, port).

    Returns the decorated SSLConnection on success, False on any failure.
    Reports handshake timing to google_ip and success/failure to
    connect_control; verifies the certificate issuer is Google.
    """
    if not connect_control.allow_connect():
        time.sleep(10)
        return False

    sock = None
    ssl_sock = None
    ip = ip_port[0]

    connect_control.start_connect_register(high_prior=True)
    connect_time = 0
    handshake_time = 0
    time_begin = time.time()
    try:
        if config.PROXY_ENABLE:
            sock = socks.socksocket(socket.AF_INET if ':' not in ip_port[0] else socket.AF_INET6)
        else:
            sock = socket.socket(socket.AF_INET if ':' not in ip_port[0] else socket.AF_INET6)
        # set reuseaddr option to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # set struct linger{l_onoff=1,l_linger=0} to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0))
        # resize socket recv buffer 8K->32K to improve browser related application performance
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
        # disable Nagle algorithm to send http request quickly.
        sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
        # set a short timeout to trigger timeout retry more quickly.
        sock.settimeout(self.timeout)

        ssl_sock = SSLConnection(self.openssl_context, sock)
        ssl_sock.set_connect_state()

        ssl_sock.connect(ip_port)
        time_connected = time.time()
        ssl_sock.do_handshake()
        time_handshaked = time.time()

        connect_time = int((time_connected - time_begin) * 1000)
        handshake_time = int((time_handshaked - time_connected) * 1000)

        google_ip.update_ip(ip, handshake_time)
        xlog.debug("create_ssl update ip:%s time:%d", ip, handshake_time)
        # sometimes, we want to use raw tcp socket directly(select/epoll), so setattr it to ssl socket.
        ssl_sock.ip = ip
        ssl_sock.sock = sock
        ssl_sock.fd = sock.fileno()
        ssl_sock.create_time = time_begin
        ssl_sock.received_size = 0
        ssl_sock.load = 0
        ssl_sock.handshake_time = handshake_time
        ssl_sock.host = ''

        def verify_SSL_certificate_issuer(ssl_sock):
            # reject ips whose certificate is not issued by Google
            # (likely hijacked/poisoned)
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #google_ip.report_bad_ip(ssl_sock.ip)
                #connect_control.fall_into_honeypot()
                raise socket.error(' certficate is none')

            issuer_commonname = next((v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            if not issuer_commonname.startswith('Google'):
                google_ip.report_connect_fail(ip, force_remove=True)
                raise socket.error(' certficate is issued by %r, not Google' % (issuer_commonname))

        verify_SSL_certificate_issuer(ssl_sock)

        connect_control.report_connect_success()
        return ssl_sock
    except Exception as e:
        time_cost = time.time() - time_begin
        if time_cost < self.timeout - 1:
            xlog.debug("connect %s fail:%s cost:%d h:%d", ip, e, time_cost * 1000, handshake_time)
        else:
            xlog.debug("%s fail:%r", ip, e)

        google_ip.report_connect_fail(ip)
        connect_control.report_connect_fail()

        if ssl_sock:
            ssl_sock.close()
        if sock:
            sock.close()
        return False
    finally:
        connect_control.end_connect_register(high_prior=True)
def _create_ssl_connection(self, ip_port):
    """Open one TCP+TLS connection to a Google (ip, port).

    Variant that registers google_ip.ssl_closed as the SSLConnection's
    close callback.  Returns the decorated SSLConnection on success,
    False on any failure.  Verifies the certificate issuer is Google.
    """
    if not connect_control.allow_connect():
        time.sleep(10)
        return False

    sock = None
    ssl_sock = None
    ip = ip_port[0]

    connect_control.start_connect_register(high_prior=True)
    connect_time = 0
    handshake_time = 0
    time_begin = time.time()
    try:
        if config.PROXY_ENABLE:
            sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        else:
            sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
        # set reuseaddr option to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # set struct linger{l_onoff=1,l_linger=0} to avoid 10048 socket error
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0))
        # resize socket recv buffer 8K->32K to improve browser related application performance
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32*1024)
        # disable Nagle algorithm to send http request quickly.
        sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
        # set a short timeout to trigger timeout retry more quickly.
        sock.settimeout(self.timeout)

        ssl_sock = SSLConnection(self.openssl_context, sock, ip, google_ip.ssl_closed)
        ssl_sock.set_connect_state()

        ssl_sock.connect(ip_port)
        time_connected = time.time()
        ssl_sock.do_handshake()
        time_handshaked = time.time()

        connect_time = int((time_connected - time_begin) * 1000)
        handshake_time = int((time_handshaked - time_connected) * 1000)

        google_ip.update_ip(ip, handshake_time)
        xlog.debug("create_ssl update ip:%s time:%d", ip, handshake_time)
        # sometimes, we want to use raw tcp socket directly(select/epoll), so setattr it to ssl socket.
        ssl_sock.ip = ip
        ssl_sock.sock = sock
        ssl_sock.fd = sock.fileno()
        ssl_sock.create_time = time_begin
        ssl_sock.received_size = 0
        ssl_sock.load = 0
        ssl_sock.handshake_time = handshake_time
        ssl_sock.host = ''

        def verify_SSL_certificate_issuer(ssl_sock):
            # reject ips whose certificate is not issued by Google
            # (likely hijacked/poisoned)
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #google_ip.report_bad_ip(ssl_sock.ip)
                #connect_control.fall_into_honeypot()
                raise socket.error(' certficate is none')

            issuer_commonname = next((v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            if not issuer_commonname.startswith('Google'):
                google_ip.report_connect_fail(ip, force_remove=True)
                raise socket.error(' certficate is issued by %r, not Google' % (issuer_commonname))

        verify_SSL_certificate_issuer(ssl_sock)

        connect_control.report_connect_success()
        return ssl_sock
    except Exception as e:
        time_cost = time.time() - time_begin
        if time_cost < self.timeout - 1:
            xlog.debug("connect %s fail:%s cost:%d h:%d", ip, e, time_cost * 1000, handshake_time)
        else:
            xlog.debug("%s fail:%r", ip, e)

        google_ip.report_connect_fail(ip)
        connect_control.report_connect_fail()

        if ssl_sock:
            ssl_sock.close()
        if sock:
            sock.close()
        return False
    finally:
        connect_control.end_connect_register(high_prior=True)
def get_gws_ip(self):
    """Pick the next usable gws ip via round-robin over gws_ip_list.

    Skips ips that were fetched too recently, failed recently (longer
    back-off if they have not succeeded in 5 minutes), exceeded the
    traffic quota, or have too many open links.  Returns the ip string,
    or None when the list is empty / nothing qualifies.
    """
    self.try_sort_gws_ip()
    self.ip_lock.acquire()
    try:
        ip_num = len(self.gws_ip_list)
        if ip_num == 0:
            #logging.warning("no gws ip")
            #time.sleep(10)
            return None

        for i in range(ip_num):
            time_now = time.time()
            if self.gws_ip_pointer >= ip_num:
                # wrapped around; throttle full-list rescans to once per second
                if time_now - self.gws_ip_pointer_reset_time < 1:
                    time.sleep(1)
                    continue
                else:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time_now
            elif self.gws_ip_pointer > 0 and time_now - self.gws_ip_pointer_reset_time > 3:
                # pointer is stale; restart from the (sorted) head of the list
                self.gws_ip_pointer = 0
                self.gws_ip_pointer_reset_time = time_now

            ip_str = self.gws_ip_list[self.gws_ip_pointer]
            get_time = self.ip_dict[ip_str]["get_time"]
            if time_now - get_time < self.ip_connect_interval:
                # handed out too recently
                self.gws_ip_pointer += 1
                continue

            if time_now - self.ip_dict[ip_str]['success_time'] > 300: # 5 min
                fail_connect_interval = 1800 # 30 min
            else:
                fail_connect_interval = 120 # 2 min
            fail_time = self.ip_dict[ip_str]["fail_time"]
            if time_now - fail_time < fail_connect_interval:
                # still in fail back-off window
                self.gws_ip_pointer += 1
                continue

            if self.trafic_control: # not check now
                # leaky-bucket: quota refills at ip_traffic_quota per second
                active_time = self.ip_dict[ip_str]['data_active']
                transfered_data = self.ip_dict[ip_str]['transfered_data'] - ((time_now - active_time) * config.ip_traffic_quota)
                if transfered_data > config.ip_traffic_quota_base:
                    self.gws_ip_pointer += 1
                    continue

            if self.ip_dict[ip_str]['links'] >= config.max_links_per_ip:
                self.gws_ip_pointer += 1
                continue

            handshake_time = self.ip_dict[ip_str]["handshake_time"]
            xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
            self.append_ip_history(ip_str, "get")
            self.ip_dict[ip_str]['get_time'] = time_now
            self.ip_dict[ip_str]['links'] += 1
            self.gws_ip_pointer += 1
            return ip_str
    except Exception as e:
        xlog.error("get_gws_ip fail:%s", e)
        traceback.print_exc()
    finally:
        self.ip_lock.release()
def do_GET(self):
    """Dispatch web-control GET requests to the matching req_*_handler.

    Known paths map to handlers; /quit stops the service; /wizard/* serves
    bundled UI files; anything else falls through to limited static file
    handling with a '..' traversal check.
    """
    path = urlparse.urlparse(self.path).path
    if path == "/log":
        return self.req_log_handler()
    elif path == "/status":
        return self.req_status_handler()
    else:
        xlog.debug('GAEProxy Web_control %s %s %s ', self.address_string(), self.command, self.path)

    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/ip_list":
        return self.req_ip_list_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path == "/ssl_pool":
        return self.req_ssl_pool_handler()
    elif path == "/download_cert":
        return self.req_download_cert_handler()
    elif path == "/is_ready":
        return self.req_is_ready_handler()
    elif path == "/test_ip":
        return self.req_test_ip_handler()
    elif path == "/check_ip":
        return self.req_check_ip_handler()
    elif path == "/quit":
        # flipping keep_running makes the worker threads exit
        connect_control.keep_running = False
        data = "Quit"
        self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
        self.wfile.write(data)
        #sys.exit(0)
        #quit()
        #os._exit(0)
        return
    elif path.startswith("/wizard/"):
        file_path = os.path.abspath(os.path.join(web_ui_path, '/'.join(path.split('/')[1:])))
        if not os.path.isfile(file_path):
            self.wfile.write(b'HTTP/1.1 404 Not Found\r\n\r\n')
            xlog.warn('%s %s %s wizard file %s not found', self.address_string(), self.command, self.path, file_path)
            return

        if file_path.endswith('.html'):
            mimetype = 'text/html'
        elif file_path.endswith('.png'):
            mimetype = 'image/png'
        elif file_path.endswith('.jpg') or file_path.endswith('.jpeg'):
            mimetype = 'image/jpeg'
        else:
            mimetype = 'application/octet-stream'

        self.send_file(file_path, mimetype)
        return
    else:
        xlog.warn('Control Req %s %s %s ', self.address_string(), self.command, self.path)

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path)
        return

    filename = os.path.normpath('./' + path)
    if self.path.startswith(('http://', 'https://')):
        # absolute-URI request: reply with cacheable empty 200
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        data += b'\r\n'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
    elif os.path.isfile(filename):
        if filename.endswith('.pac'):
            mimetype = 'text/plain'
        else:
            mimetype = 'application/octet-stream'
        # NOTE(review): send_file call is commented out, so matching files
        # currently get no response body — confirm whether intentional.
        #self.send_file(filename, mimetype)
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def do_CONNECT_DIRECT(self):
    """Handle CONNECT by MITM-ing the client and forwarding DIRECT.

    Only port 443 is supported.  Deploys a locally generated fake cert,
    re-reads the real request from inside the TLS tunnel, then passes it
    to direct_handler.handler().
    """
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # tell the client the tunnel is "established" before the MITM handshake
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # NOTE(review): regenerates a full-domain cert but still returns;
        # presumably the client's retry will then succeed — confirm.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # swap the plain socket for the TLS-wrapped one; keep originals for cleanup
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        if len(self.parsed_url[4]):
            # re-attach the query string to the path
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # always tear down the hijacked raw connection
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def do_CONNECT_AGENT(self):
    """Handle CONNECT by deploying a fake cert and tunneling through GAE.

    MITM-handshakes the client with a locally generated certificate for
    the target host, re-reads the real request from inside the tunnel,
    redirects non-GAE-capable methods to DIRECT where possible, otherwise
    dispatches to do_AGENT().
    """
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)
    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # tell the client the tunnel is "established" before the MITM handshake
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # NOTE(review): regenerates a full-domain cert but still returns;
        # presumably the client's retry will then succeed — confirm.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # swap the plain socket for the TLS-wrapped one; keep originals for cleanup
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            xlog.warn("read request line len:%d", len(self.raw_requestline))
            return
        if not self.raw_requestline:
            xlog.warn("read request line empty")
            return
        if not self.parse_request():
            xlog.warn("parse request fail:%s", self.raw_requestline)
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)
    if self.command not in self.gae_support_methods:
        # method GAE can't forward: move the host to the DIRECT list and redirect
        if host.endswith(".google.com") or host.endswith(config.HOSTS_DIRECT_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_DIRECT:
                fwd_set = [s for s in config.HOSTS_DIRECT]
                fwd_set.append(host)
                config.HOSTS_DIRECT = tuple(fwd_set)
            xlog.warn("Method %s not support in GAE, Redirect to DIRECT for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            xlog.warn("Method %s not support in GAEProxy for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)

        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # always tear down the hijacked raw connection
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
def do_GET(self):
    """Web-control GET dispatcher.

    Routes /log, /status and the other control endpoints to their
    req_*_handler methods, serves wizard static files, and falls through
    to a minimal static-file / 404 responder for anything else.
    """
    path = urlparse.urlparse(self.path).path
    if path == "/log":
        return self.req_log_handler()
    elif path == "/status":
        return self.req_status_handler()
    else:
        # Only non-log/status requests are worth a debug line.
        xlog.debug('GAEProxy Web_control %s %s %s ', self.address_string(), self.command, self.path)

    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/ip_list":
        return self.req_ip_list_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path == "/ssl_pool":
        return self.req_ssl_pool_handler()
    elif path == "/download_cert":
        return self.req_download_cert_handler()
    elif path == "/is_ready":
        return self.req_is_ready_handler()
    elif path == "/test_ip":
        return self.req_test_ip_handler()
    elif path == "/check_ip":
        return self.req_check_ip_handler()
    elif path == "/quit":
        # Signal every worker loop to stop; the process is expected to
        # wind down on its own (hard-exit calls left disabled below).
        connect_control.keep_running = False
        data = "Quit"
        self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
        self.wfile.write(data)
        #sys.exit(0)
        #quit()
        #os._exit(0)
        return
    elif path.startswith("/wizard/"):
        # Serve wizard assets from web_ui_path; abspath+join confines the
        # lookup to the wizard tree (plus the '..' check further down for
        # other paths).
        file_path = os.path.abspath(os.path.join(web_ui_path, '/'.join(path.split('/')[1:])))
        if not os.path.isfile(file_path):
            self.wfile.write(b'HTTP/1.1 404 Not Found\r\n\r\n')
            xlog.warn('%s %s %s wizard file %s not found', self.address_string(), self.command, self.path, file_path)
            return
        # Crude extension -> MIME mapping for the few asset types shipped.
        if file_path.endswith('.html'):
            mimetype = 'text/html'
        elif file_path.endswith('.png'):
            mimetype = 'image/png'
        elif file_path.endswith('.jpg') or file_path.endswith('.jpeg'):
            mimetype = 'image/jpeg'
        else:
            mimetype = 'application/octet-stream'

        self.send_file(file_path, mimetype)
        return
    else:
        xlog.warn('Control Req %s %s %s ', self.address_string(), self.command, self.path)

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path)
        return

    filename = os.path.normpath('./' + path)
    if self.path.startswith(('http://', 'https://')):
        # Absolute-URI request (browser treating us as a proxy): reply
        # with cache headers and an empty body.
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        data += b'\r\n'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
    elif os.path.isfile(filename):
        if filename.endswith('.pac'):
            mimetype = 'text/plain'
        else:
            mimetype = 'application/octet-stream'
        # NOTE(review): mimetype is computed but the send is disabled, so
        # existing files currently get no response at all -- confirm
        # whether this was intentionally switched off.
        #self.send_file(filename, mimetype)
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def handler(method, url, headers, body, wfile):
    """Fetch *url* through a GAE appid and stream the response to *wfile*.

    Retries inside a loop (up to 30s wall time) on bad app_status codes,
    rotating away broken IPs and appids as it goes:
      404 -> appid deleted, 403/405 -> IP unusable for GAE, 503 -> appid
      out of quota.  A 206 response is delegated to RangeFetch.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                # An IP that is not gws / Google Frontend cannot serve GAE
                # at all -- drop it permanently and retry elsewhere.
                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                # The appid no longer exists: retire it and retry with
                # another appid if one is available.
                xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                # Appid over quota: retire it for now and retry on another.
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "get_timeout")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Anything below 500 is considered a usable response; 5xx
            # app_status loops for another try.
            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    # Partial content: hand off to the parallel range fetcher.
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # The GAE server smuggles the true length of a HEAD response in
        # X-Head-Content-Length; surface it only for HEAD requests.
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away: keep draining the GAE response below so
            # the ssl_sock can still be reused, but stop writing.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        # app_msg is an error body generated by the GAE app itself.
        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # Whole body transferred: log throughput, account the
                # traffic, and park the SSL connection for reuse.
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, response.status, url)

                response.ssl_sock.received_size += body_length
                google_ip.report_ip_traffic(response.ssl_sock.ip, body_length)
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                # Empty read: poll up to 20s before declaring the
                # download dead and discarding the connection.
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s",
                              (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    # Non-blocking SSL may ask for a retry of the same write.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    # Keep draining the upstream body without forwarding.
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def fetch(self):
    """Reassemble a ranged download and stream it to the client in order.

    Rewrites the 206 headers into a plain 200 (or adjusted range), spawns
    self.threads __fetchlet workers that pull (begin, end) spans from
    range_queue, and forwards their (begin, data) results from a priority
    queue strictly in offset order via self.expect_begin.
    """
    response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
    content_range = response_headers['Content-Range']
    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
    if start == 0:
        # Client asked from the beginning: present it as a full 200 body.
        response_headers['Content-Length'] = str(length)
        del response_headers['Content-Range']
    else:
        response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length)
        response_headers['Content-Length'] = str(length - start)

    xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end)

    try:
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        for key in response_headers:
            if key == 'Transfer-Encoding':
                continue
            if key == 'X-Head-Content-Length':
                continue
            if key in skip_headers:
                continue
            value = response_headers[key]
            #logging.debug("Head %s: %s", key.title(), value)
            send_header(self.wfile, key, value)
        self.wfile.write("\r\n")
    except Exception as e:
        self._stopped = True
        xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
        return

    data_queue = Queue.PriorityQueue()
    range_queue = Queue.PriorityQueue()
    # Seed the queue with the range we already hold a response for, then
    # carve the remainder of the file into maxsize-sized spans.
    range_queue.put((start, end, self.response))
    self.expect_begin = start
    for begin in range(end + 1, length, self.maxsize):
        range_queue.put((begin, min(begin + self.maxsize - 1, length - 1), None))
    # Stagger worker start-up so they don't all hit the network at once.
    for i in xrange(0, self.threads):
        range_delay_size = i * self.maxsize
        spawn_later(float(range_delay_size) / self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size)

    # peek lets us inspect the head without consuming out-of-order chunks;
    # fall back to get/put-back when the queue type lacks it.
    has_peek = hasattr(data_queue, 'peek')
    peek_timeout = 120
    while self.expect_begin < length - 1:
        try:
            if has_peek:
                begin, data = data_queue.peek(timeout=peek_timeout)
                if self.expect_begin == begin:
                    data_queue.get()
                elif self.expect_begin < begin:
                    # Next chunk not ready yet; wait for a worker.
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
            else:
                begin, data = data_queue.get(timeout=peek_timeout)
                if self.expect_begin == begin:
                    pass
                elif self.expect_begin < begin:
                    # Out-of-order chunk: push it back and wait.
                    data_queue.put((begin, data))
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
        except Queue.Empty:
            xlog.error('data_queue peek timeout, break')
            break

        try:
            ret = self.wfile.write(data)
            # Non-blocking SSL may require the same write to be retried.
            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                ret = self.wfile.write(data)
                xlog.debug("send to browser wfile.write ret:%d", ret)
            self.expect_begin += len(data)
            # Drop the reference promptly; chunks can be large.
            del data
        except Exception as e:
            xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
            break
    # Signal the __fetchlet workers to stop.
    self._stopped = True
def do_CONNECT_DIRECT(self):
    """Handle a CONNECT request in DIRECT mode: MITM the TLS session with
    a fake cert (443 only), re-parse the tunnelled request, and forward it
    through direct_handler instead of GAE.
    """
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        # Direct mode only fronts HTTPS.
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    # Acknowledge the tunnel before the TLS handshake starts.
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        # Handshake failed: prepare a full-domain cert for the client's
        # retry, then drop this connection.
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
        return

    # Save the raw socket/files for the finally-block teardown, then
    # redirect the handler onto the SSL socket.
    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            # Request line too long -> 414, as BaseHTTPServer does.
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    # Turn the origin-form path inside the tunnel into an absolute URL.
    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT Direct %s %s', self.command, self.path)

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        # parsed_url[2] is the path, parsed_url[4] the query string.
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        # Forward to the direct (non-GAE) fetcher.
        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        # Always close the real (outer) socket we hijacked above.
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None
google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail") response.close() xlog.warn("read timeout t:%d len:%d left:%d %s %s", (time.time()-time_request)*1000, length, (end-start), host, url) return else: time.sleep(0.1) continue time_last_read = time.time() data_len = len(data) start += data_len if send_to_browser: try: ret = wfile.write(data) if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ: xlog.debug("send to browser wfile.write ret:%d", ret) ret = wfile.write(data) except Exception as e_b: if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b): xlog.warn('direct_handler send to browser return %r %s %r', e_b, host, url) else: xlog.warn('direct_handler send to browser return %r %s %r', e_b, host, url) send_to_browser = False except NetWorkIOError as e: google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail") time_except = time.time() time_cost = time_except - time_request if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e): xlog.exception("direct_handler err:%r %s %s time:%d", e, host, url, time_cost)
def report_not_exist(self, appid, ip=None):
    """Asynchronously record that *appid* no longer exists on GAE.

    appid: the appid that returned 404 from the GAE frontend.
    ip: the server IP the failure was observed on, when known.

    ip defaults to None because several call sites in this codebase
    (gae_handler.handler and the request() retry loop) invoke
    report_not_exist(appid) with a single argument; with a mandatory ip
    those calls would raise TypeError before anything was reported.
    """
    xlog.debug("report_not_exist:%s %s", appid, ip)
    # Off-load the bookkeeping to a thread: callers sit on the hot
    # request path and must not block on it.
    # NOTE(review): process_appid_not_exist now receives ip=None when
    # the caller did not supply one -- confirm it tolerates None.
    th = threading.Thread(target=self.process_appid_not_exist, args=(appid, ip))
    th.start()
def get_gws_ip(self):
    """Pick the next usable gws IP, round-robin via self.gws_ip_pointer.

    Skips IPs that were handed out too recently, failed recently, exceed
    the (currently disabled) traffic quota, or already carry the maximum
    number of links.  Returns the chosen IP string, or None when no IP
    qualifies.  All bookkeeping happens under self.ip_lock.
    """
    self.try_sort_gws_ip()
    self.ip_lock.acquire()
    try:
        ip_num = len(self.gws_ip_list)
        if ip_num == 0:
            #logging.warning("no gws ip")
            # NOTE(review): sleeps while still holding ip_lock, stalling
            # every other caller for 10s -- confirm this is intended.
            time.sleep(10)
            return None

        # At most one full sweep of the list per call.
        for i in range(ip_num):
            time_now = time.time()
            if self.gws_ip_pointer >= ip_num:
                # Wrapped past the end: rate-limit resets to one per second.
                if time_now - self.gws_ip_pointer_reset_time < 1:
                    # NOTE(review): also sleeps under ip_lock.
                    time.sleep(1)
                    continue
                else:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time_now
            elif self.gws_ip_pointer > 0 and time_now - self.gws_ip_pointer_reset_time > 3:
                # Stale pointer: restart from the head (best-sorted) IP.
                self.gws_ip_pointer = 0
                self.gws_ip_pointer_reset_time = time_now

            ip_str = self.gws_ip_list[self.gws_ip_pointer]
            get_time = self.ip_dict[ip_str]["get_time"]
            if time_now - get_time < self.ip_connect_interval:
                # Handed out too recently; try the next one.
                self.gws_ip_pointer += 1
                continue

            # Back off longer on IPs that haven't succeeded lately.
            if time_now - self.ip_dict[ip_str]['success_time'] > 300:  # 5 min
                fail_connect_interval = 1800  # 30 min
            else:
                fail_connect_interval = 120  # 2 min
            fail_time = self.ip_dict[ip_str]["fail_time"]
            if time_now - fail_time < fail_connect_interval:
                self.gws_ip_pointer += 1
                continue

            if self.trafic_control:  # not check now
                # Leaky-bucket style quota: transferred bytes decay at
                # ip_traffic_quota per second since last activity.
                active_time = self.ip_dict[ip_str]['data_active']
                transfered_data = self.ip_dict[ip_str]['transfered_data'] - ((time_now - active_time) * config.ip_traffic_quota)
                if transfered_data > config.ip_traffic_quota_base:
                    self.gws_ip_pointer += 1
                    continue

            if self.ip_dict[ip_str]['links'] >= config.max_links_per_ip:
                # IP already saturated with connections.
                self.gws_ip_pointer += 1
                continue

            handshake_time = self.ip_dict[ip_str]["handshake_time"]
            xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
            self.append_ip_history(ip_str, "get")
            self.ip_dict[ip_str]['get_time'] = time_now
            self.ip_dict[ip_str]['links'] += 1
            self.gws_ip_pointer += 1
            return ip_str
    except Exception as e:
        xlog.error("get_gws_ip fail:%s", e)
        traceback.print_exc()
    finally:
        self.ip_lock.release()