def connect_thread(self, sleep_time=0):
    """Background pool filler: after an optional delay, keep creating
    SSL connections until new_conn_pool holds connection_pool_min_num
    entries, then decrement the worker thread counter."""
    time.sleep(sleep_time)
    try:
        while self.new_conn_pool.qsize() < self.connection_pool_min_num:
            if self.new_conn_pool.qsize() >= self.connection_pool_min_num:
                # another worker filled the pool in the meantime
                break

            gws_ip = google_ip.get_gws_ip()
            if not gws_ip:
                xlog.warning("no gws ip")
                break

            new_sock = self._create_ssl_connection((gws_ip, 443))
            if new_sock:
                new_sock.last_use_time = time.time()
                self.new_conn_pool.put((new_sock.handshake_time, new_sock))
            elif not connect_control.allow_connect():
                # connect failed and the controller refuses more attempts
                break
            time.sleep(1)
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def check_all_exist_ip(): good_ip_file_name = "good_ip.txt" good_ip_file = os.path.abspath( os.path.join(config.DATA_PATH, good_ip_file_name)) if not os.path.isfile(good_ip_file): print "open file ", good_ip_file_name, " fail." return with open(good_ip_file, "r") as fd: lines = fd.readlines() for line in lines: try: str_l = line.split(' ') if len(str_l) != 4: xlog.warning("line err: %s", line) continue ip_str = str_l[0] domain = str_l[1] server = str_l[2] handshake_time = int(str_l[3]) xlog.info("test ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server) #test_with_app(ip_str) test_gws(ip_str) #self.add_ip(ip_str, handshake_time, domain, server) except Exception as e: xlog.exception("load_ip line:%s err:%s", line, e)
def create_connection_worker(self, type="gae"):
    """Worker loop: build SSL connections (registering each attempt with
    connect_control) until the relevant pool(s) reach the minimum size.

    For type "gae" both new_conn_pool and gae_conn_pool count toward the
    target; otherwise only new_conn_pool does.  (`type` shadows the
    builtin but is kept for caller compatibility.)"""
    try:
        while connect_control.keep_running:
            if type == "gae":
                pooled = self.new_conn_pool.qsize() + self.gae_conn_pool.qsize()
            else:
                pooled = self.new_conn_pool.qsize()
            if pooled >= self.connection_pool_min_num:
                break

            gws_ip = google_ip.get_gws_ip()
            if not gws_ip:
                xlog.warning("no gws ip")
                break

            connect_control.start_connect_register(True)
            new_sock = self._create_ssl_connection((gws_ip, 443))
            connect_control.end_connect_register(True)
            if new_sock:
                new_sock.last_use_time = time.time()
                self.new_conn_pool.put((new_sock.handshake_time, new_sock))
            elif not connect_control.allow_connect():
                break
            time.sleep(1)
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def create_connection_worker(self):
    """Worker loop: add SSL connections until the pool is full, stopping
    early when enough sibling workers are already running or when the
    connect controller denies further attempts."""
    try:
        while connect_control.keep_running:
            if self.new_conn_pool.qsize() >= self.connection_pool_min_num:
                break  # pool reached its minimum size
            if self.thread_num > self.connection_pool_min_num - self.new_conn_pool.qsize():
                break  # other workers will cover the remaining shortage

            gws_ip = google_ip.get_gws_ip()
            if not gws_ip:
                xlog.warning("no gws ip")
                break

            connect_control.start_connect_register(True)
            new_sock = self._create_ssl_connection((gws_ip, 443))
            connect_control.end_connect_register(True)
            if new_sock:
                new_sock.last_use_time = time.time()
                self.new_conn_pool.put((new_sock.handshake_time, new_sock))
            elif not connect_control.allow_connect():
                # back off while connecting is disallowed
                xlog.debug("create_connection_worker, control not allow")
                time.sleep(10)
            time.sleep(1)
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def load_ip(self):
    """Load the good-ip list (falling back to the bundled default file),
    register every record via add_ip, then force a gws-ip re-sort.

    Line format: "ip domain server handshake_time [fail_times]";
    lines starting with '#' are comments."""
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file

    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            if line.startswith("#"):
                continue  # comment line

            fields = line.split(" ")
            if len(fields) < 4:
                xlog.warning("line err: %s", line)
                continue

            ip_str, domain, server = fields[0], fields[1], fields[2]
            handshake_time = int(fields[3])
            fail_times = int(fields[4]) if len(fields) > 4 else 0
            self.add_ip(ip_str, handshake_time, domain, server, fail_times)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_gws_ip(force=True)
def remove_windows_ca(name):
    """Delete every certificate whose subject CN equals *name* from the
    Windows "ROOT" (trusted root CA) system store, via crypt32/ctypes.

    Errors are logged and swallowed; nothing is returned.
    """
    import ctypes
    import ctypes.wintypes

    class CERT_CONTEXT(ctypes.Structure):
        # Mirrors the win32 CERT_CONTEXT struct so the raw DER bytes of
        # each enumerated certificate can be read out of process memory.
        _fields_ = [
            ('dwCertEncodingType', ctypes.wintypes.DWORD),
            ('pbCertEncoded', ctypes.POINTER(ctypes.wintypes.BYTE)),
            ('cbCertEncoded', ctypes.wintypes.DWORD),
            ('pCertInfo', ctypes.c_void_p),
            ('hCertStore', ctypes.c_void_p),]
    try:
        crypt32 = ctypes.WinDLL(b'crypt32.dll'.decode())
        # Opens the system "ROOT" store; provider 10 / flags 0x4000|0x20000
        # presumably select the existing local-machine store — TODO confirm
        # against wincrypt.h constants.
        store_handle = crypt32.CertOpenStore(10, 0, 0, 0x4000 | 0x20000, b'ROOT'.decode())
        pCertCtx = crypt32.CertEnumCertificatesInStore(store_handle, None)
        while pCertCtx:
            certCtx = CERT_CONTEXT.from_address(pCertCtx)
            certdata = ctypes.string_at(certCtx.pbCertEncoded, certCtx.cbCertEncoded)
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_ASN1, certdata)
            if hasattr(cert, 'get_subject'):
                cert = cert.get_subject()
            # First CN component of the subject, or '' when absent.
            cert_name = next((v for k, v in cert.get_components() if k == 'CN'), '')
            if cert_name and name == cert_name:
                # Duplicate the context before deleting: deletion frees the
                # context, but the enumeration below still needs pCertCtx.
                crypt32.CertDeleteCertificateFromStore(crypt32.CertDuplicateCertificateContext(pCertCtx))
            pCertCtx = crypt32.CertEnumCertificatesInStore(store_handle, pCertCtx)
    except Exception as e:
        xlog.warning('CertUtil.remove_windows_ca failed: %r', e)
def connect_thread(self, sleep_time=0):
    """Delayed background filler: top up new_conn_pool with SSL
    connections until it holds connection_pool_min_num entries."""
    time.sleep(sleep_time)
    try:
        while self.new_conn_pool.qsize() < self.connection_pool_min_num:
            if self.new_conn_pool.qsize() >= self.connection_pool_min_num:
                break  # pool was filled while we were working

            ip = google_ip.get_gws_ip()
            if not ip:
                xlog.warning("ip not enough")
                break

            sock = self._create_ssl_connection((ip, 443))
            if sock:
                sock.last_use_time = time.time()
                self.new_conn_pool.put((sock.handshake_time, sock))
            elif not connect_control.allow_connect():
                break
            time.sleep(1)
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def load_ip(self):
    """Read the good-ip file (or the shipped default) and register each
    "ip domain server handshake_time [fail_times]" record with add_ip,
    then trigger a forced gws-ip re-sort."""
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file

    with open(file_path, "r") as fd:
        for line in fd.readlines():
            try:
                parts = line.split(' ')
                if len(parts) < 4:
                    xlog.warning("line err: %s", line)
                    continue

                handshake_time = int(parts[3])
                if len(parts) > 4:
                    fail_times = int(parts[4])
                else:
                    fail_times = 0
                self.add_ip(parts[0], handshake_time, parts[1], parts[2], fail_times)
            except Exception as e:
                xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_gws_ip(force=True)
def create_connection_worker(self, type="gae"):
    """Worker: keep adding fresh SSL connections until the pool(s) for
    the given type reach connection_pool_min_num.  (`type` shadows the
    builtin but must keep its name for callers.)"""
    try:
        while connect_control.keep_running:
            if type == "gae":
                current = self.new_conn_pool.qsize() + self.gae_conn_pool.qsize()
            else:
                current = self.new_conn_pool.qsize()
            if current >= self.connection_pool_min_num:
                break

            ip = google_ip.get_gws_ip()
            if not ip:
                xlog.warning("no gws ip")
                break

            sock = self._create_ssl_connection((ip, 443))
            if sock:
                sock.last_use_time = time.time()
                self.new_conn_pool.put((sock.handshake_time, sock))
            elif not connect_control.allow_connect():
                break
            time.sleep(1)
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def remove_windows_ca(name):
    """Remove all certificates with subject CN == *name* from the
    Windows "ROOT" system certificate store using crypt32 via ctypes.

    Failures are logged via xlog and otherwise ignored.
    """
    import ctypes
    import ctypes.wintypes

    class CERT_CONTEXT(ctypes.Structure):
        # ctypes mirror of the win32 CERT_CONTEXT struct; only the
        # encoded-bytes fields are actually read below.
        _fields_ = [
            ('dwCertEncodingType', ctypes.wintypes.DWORD),
            ('pbCertEncoded', ctypes.POINTER(ctypes.wintypes.BYTE)),
            ('cbCertEncoded', ctypes.wintypes.DWORD),
            ('pCertInfo', ctypes.c_void_p),
            ('hCertStore', ctypes.c_void_p), ]
    try:
        crypt32 = ctypes.WinDLL(b'crypt32.dll'.decode())
        # Open the system "ROOT" store; provider id 10 and flags
        # 0x4000|0x20000 presumably mean open-existing / local-machine —
        # TODO confirm against wincrypt.h.
        store_handle = crypt32.CertOpenStore(10, 0, 0, 0x4000 | 0x20000, b'ROOT'.decode())
        pCertCtx = crypt32.CertEnumCertificatesInStore(store_handle, None)
        while pCertCtx:
            certCtx = CERT_CONTEXT.from_address(pCertCtx)
            certdata = ctypes.string_at(certCtx.pbCertEncoded, certCtx.cbCertEncoded)
            cert = OpenSSL.crypto.load_certificate(
                OpenSSL.crypto.FILETYPE_ASN1, certdata)
            if hasattr(cert, 'get_subject'):
                cert = cert.get_subject()
            # First CN component of the subject, '' when there is none.
            cert_name = next(
                (v for k, v in cert.get_components() if k == 'CN'), '')
            if cert_name and name == cert_name:
                # Delete via a duplicated context so the enumeration
                # handle below stays valid.
                crypt32.CertDeleteCertificateFromStore(
                    crypt32.CertDuplicateCertificateContext(pCertCtx))
            pCertCtx = crypt32.CertEnumCertificatesInStore(
                store_handle, pCertCtx)
    except Exception as e:
        xlog.warning('CertUtil.remove_windows_ca failed: %r', e)
def import_debian_ca(common_name, ca_file):
    """Install *ca_file* into the user's NSS database (~/.pki/nssdb)
    under *common_name*, using the certutil tool.

    Returns False when the nssdb directory or certutil is missing,
    None when the stored cert already matches CertUtil.ca_thumbprint,
    and True after running the delete+add commands (whose exit codes
    are not checked).
    """

    def get_debian_ca_sha1(nss_path):
        # Ask certutil for the stored cert and pull the line following
        # the "Fingerprint (SHA1):" header; returns False unless a
        # 40-hex-char sha1 is recovered.
        commonname = "GoAgent XX-Net - GoAgent"  #TODO: here should be GoAgent - XX-Net
        cmd = [
            'certutil', '-L', '-d',
            'sql:%s' % nss_path, '-n', commonname
        ]
        lines = get_cmd_out(cmd)

        get_sha1_title = False
        sha1 = ""
        for line in lines:
            if line.endswith("Fingerprint (SHA1):\n"):
                get_sha1_title = True
                continue
            if get_sha1_title:
                sha1 = line
                break

        sha1 = sha1.replace(' ', '').replace(':', '').replace('\n', '')
        if len(sha1) != 40:
            return False
        else:
            return sha1

    home_path = os.path.expanduser("~")
    nss_path = os.path.join(home_path, ".pki/nssdb")
    if not os.path.isdir(nss_path):
        return False
    if not any(
            os.path.isfile('%s/certutil' % x)
            for x in os.environ['PATH'].split(os.pathsep)):
        xlog.warning(
            'please install *libnss3-tools* package to import GoAgent root ca'
        )
        return False

    sha1 = get_debian_ca_sha1(nss_path)
    ca_hash = CertUtil.ca_thumbprint.replace(':', '')
    if sha1 == ca_hash:
        xlog.info("system cert exist")
        return

    # shell command to list all cert
    # certutil -L -d sql:$HOME/.pki/nssdb

    # remove old cert first
    # NOTE(review): common_name is interpolated into a shell string; safe
    # only because it comes from our own config, never from user input.
    cmd_line = 'certutil -L -d sql:$HOME/.pki/nssdb |grep "GoAgent" && certutil -d sql:$HOME/.pki/nssdb -D -n "%s" ' % (
        common_name)
    os.system(cmd_line)

    # install new cert
    cmd_line = 'certutil -d sql:$HOME/.pki/nssdb -A -t "C,," -n "%s" -i "%s"' % (
        common_name, ca_file)
    os.system(cmd_line)
    return True
def import_ubuntu_system_ca(common_name, certfile):
    """Install *certfile* into the system trust store via
    update-ca-certificates; does nothing on non-Ubuntu platforms."""
    import platform
    if platform.dist()[0] != 'Ubuntu':
        return

    pemfile = "/etc/ssl/certs/CA.pem"
    new_certfile = "/usr/local/share/ca-certificates/CA.crt"
    # Reinstall when the system pem is missing or our cert changed.
    need_install = (not os.path.exists(pemfile)
                    or not CertUtil.file_is_same(certfile, new_certfile))
    if need_install:
        ret = os.system('cp "%s" "%s" && update-ca-certificates' % (certfile, new_certfile))
        if ret != 0:
            xlog.warning('install root certificate failed, Please run as administrator/root/sudo')
def import_debian_ca(common_name, ca_file):
    """Install *ca_file* into the user's NSS database (~/.pki/nssdb)
    under *common_name* using certutil.

    Returns False when nssdb or certutil is unavailable, None when the
    installed cert already matches CertUtil.ca_thumbprint, True after
    issuing the delete+add shell commands (exit codes unchecked).
    """

    def get_debian_ca_sha1(nss_path):
        # Extract the SHA1 fingerprint of the stored cert from certutil
        # output; returns False unless a 40-char sha1 is found.
        commonname = "GoAgent XX-Net - GoAgent"  #TODO: here should be GoAgent - XX-Net
        cmd = ['certutil', '-L','-d', 'sql:%s' % nss_path, '-n', commonname]
        lines = get_cmd_out(cmd)

        get_sha1_title = False
        sha1 = ""
        for line in lines:
            if line.endswith("Fingerprint (SHA1):\n"):
                get_sha1_title = True
                continue
            if get_sha1_title:
                # the fingerprint is on the line after the header
                sha1 = line
                break

        sha1 = sha1.replace(' ', '').replace(':', '').replace('\n', '')
        if len(sha1) != 40:
            return False
        else:
            return sha1

    home_path = os.path.expanduser("~")
    nss_path = os.path.join(home_path, ".pki/nssdb")
    if not os.path.isdir(nss_path):
        return False
    if not any(os.path.isfile('%s/certutil' % x) for x in os.environ['PATH'].split(os.pathsep)):
        xlog.warning('please install *libnss3-tools* package to import GoAgent root ca')
        return False

    sha1 = get_debian_ca_sha1(nss_path)
    ca_hash = CertUtil.ca_thumbprint.replace(':', '')
    if sha1 == ca_hash:
        xlog.info("system cert exist")
        return

    # shell command to list all cert
    # certutil -L -d sql:$HOME/.pki/nssdb

    # remove old cert first
    # NOTE(review): common_name goes into a shell string; trusted input only.
    cmd_line = 'certutil -L -d sql:$HOME/.pki/nssdb |grep "GoAgent" && certutil -d sql:$HOME/.pki/nssdb -D -n "%s" ' % (
        common_name)
    os.system(cmd_line)

    # install new cert
    cmd_line = 'certutil -d sql:$HOME/.pki/nssdb -A -t "C,," -n "%s" -i "%s"' % (common_name, ca_file)
    os.system(cmd_line)
    return True
def import_linux_firefox_ca(common_name, ca_file):
    """Import *ca_file* into the Firefox NSS profile via certutil.

    Returns False when the profile or certutil is missing, True after
    running the delete+add commands (exit codes are not checked)."""
    firefox_config_path = CertUtil.get_linux_firefox_path()
    if not firefox_config_path:
        return False

    have_certutil = any(os.path.isfile('%s/certutil' % x)
                        for x in os.environ['PATH'].split(os.pathsep))
    if not have_certutil:
        xlog.warning('please install *libnss3-tools* package to import GoAgent root ca')
        return False

    # remove old cert first
    os.system('certutil -L -d %s |grep "GoAgent" &&certutil -d %s -D -n "%s" ' % (firefox_config_path, firefox_config_path, common_name))
    # install new cert
    os.system('certutil -d %s -A -t "C,," -n "%s" -i "%s"' % (firefox_config_path, common_name, ca_file))
    return True
def import_ubuntu_system_ca(common_name, certfile):
    """Copy *certfile* into /usr/local/share/ca-certificates and refresh
    the system trust store; no-op unless running on Ubuntu."""
    import platform
    platform_distname = platform.dist()[0]
    if platform_distname != 'Ubuntu':
        return

    pemfile = "/etc/ssl/certs/CA.pem"
    new_certfile = "/usr/local/share/ca-certificates/CA.crt"
    if os.path.exists(pemfile) and CertUtil.file_is_same(certfile, new_certfile):
        return  # already installed and unchanged

    if os.system('cp "%s" "%s" && update-ca-certificates' % (certfile, new_certfile)) != 0:
        xlog.warning('install root certificate failed, Please run as administrator/root/sudo')
def connect_process(self):
    """Create one SSL connection and queue it in new_conn_pool; always
    decrement the worker thread counter on exit."""
    try:
        gws_ip = google_ip.get_gws_ip()
        if not gws_ip:
            xlog.warning("no gws ip")
            return

        new_sock = self._create_ssl_connection((gws_ip, 443))
        if new_sock:
            new_sock.last_use_time = time.time()
            self.new_conn_pool.put((new_sock.handshake_time, new_sock))
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def connect_process(self):
    """One-shot worker: build a single SSL connection, push it into the
    pool, and release this thread's slot in thread_num."""
    try:
        ip = google_ip.get_gws_ip()
        if not ip:
            xlog.warning("no gws ip")
            return

        sock = self._create_ssl_connection((ip, 443))
        if sock:
            sock.last_use_time = time.time()
            self.new_conn_pool.put((sock.handshake_time, sock))
    finally:
        self.thread_num_lock.acquire()
        self.thread_num -= 1
        self.thread_num_lock.release()
def load_ip(self):
    """Load the good-ip list (or the bundled default) into ip_dict via
    add_ip, re-sort by handshake time, then load bad_ip_file into
    bad_ip_pool.

    good-ip line format: "ip domain server handshake_time";
    bad-ip file format: one ip per line.
    """
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file
    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            str_l = line.split(' ')
            if len(str_l) != 4:
                xlog.warning("line err: %s", line)
                continue
            ip_str = str_l[0]
            domain = str_l[1]
            server = str_l[2]
            handshake_time = int(str_l[3])
            self.add_ip(ip_str, handshake_time, domain, server)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_ip_by_handshake_time(force=True)

    if os.path.isfile(self.bad_ip_file):
        with open(self.bad_ip_file, "r") as fd:
            for line in fd.readlines():
                try:
                    if line == "\n":
                        continue
                    ip = line.replace('\n', '')
                    if not ip_utils.check_ip_valid(ip):
                        xlog.warning("bad_ip line err: %s", line)
                        continue
                    # BUG FIX: the old code did `self.bad_ip_pool.add(str_l[1])`
                    # where str_l was the ip STRING, so only its second
                    # character was pooled; add the whole ip instead.
                    self.bad_ip_pool.add(ip)
                except Exception as e:
                    xlog.exception("parse bad_ip.txt err:%r", e)
def import_linux_firefox_ca(common_name, ca_file):
    """Import *ca_file* into the user's Firefox NSS profile via certutil.

    Returns False when the profile directory or certutil is missing,
    True after running the delete+add commands (exit codes unchecked).
    """
    firefox_config_path = CertUtil.get_linux_firefox_path()
    if not firefox_config_path:
        return False

    if not any(
            os.path.isfile('%s/certutil' % x)
            for x in os.environ['PATH'].split(os.pathsep)):
        xlog.warning(
            'please install *libnss3-tools* package to import GoAgent root ca'
        )
        return False

    # NOTE(review): common_name/firefox_config_path are interpolated into
    # shell strings; safe only because both are internally generated.
    cmd_line = 'certutil -L -d %s |grep "GoAgent" &&certutil -d %s -D -n "%s" ' % (
        firefox_config_path, firefox_config_path, common_name)
    os.system(cmd_line)  # remove old cert first

    cmd_line = 'certutil -d %s -A -t "C,," -n "%s" -i "%s"' % (
        firefox_config_path, common_name, ca_file)
    os.system(cmd_line)  # install new cert
    return True
def create_connection(self, host="", port=443, sock_life=5):
    """Return a fresh (or cached) TCP socket to a Google ip on port 443.

    For hosts other than appengine.google.com a cached socket younger
    than *sock_life* seconds is reused.  Otherwise worker threads dial
    new ips until one lands in tcp_connection_cache or max_timeout
    elapses; returns None on failure (logged).
    """
    if port != 443:
        xlog.warn("forward port %d not supported.", port)
        return None

    def _create_connection(ip_port, delay=0):
        # Worker body: dial ip_port, report timing to google_ip, park the
        # socket in tcp_connection_cache; always release the thread slot.
        time.sleep(delay)
        ip = ip_port[0]
        sock = None
        # start connection time record
        start_time = time.time()
        conn_time = 0
        try:
            # create a ipv4/ipv6 socket object
            if config.PROXY_ENABLE:
                sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            else:
                sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            # set reuseaddr option to avoid 10048 socket error
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # resize socket recv buffer 8K->32K to improve browser related application performance
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
            # disable nagle algorithm to send http request quickly.
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
            # set a short timeout to trigger timeout retry more quickly.
            sock.settimeout(self.timeout)

            sock.connect(ip_port)

            # record TCP connection time and feed the ip scorer
            conn_time = time.time() - start_time
            xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)
            google_ip.update_ip(ip, conn_time * 2000)

            self.tcp_connection_cache.put((time.time(), sock))
        except Exception as e:
            conn_time = int((time.time() - start_time) * 1000)
            xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
            google_ip.report_connect_fail(ip)
            if sock:
                sock.close()
        finally:
            self.thread_num_lock.acquire()
            self.thread_num -= 1
            self.thread_num_lock.release()

    if host != "appengine.google.com":
        # Try to reuse a cached socket that is still young enough.
        while True:
            try:
                ctime, sock = self.tcp_connection_cache.get_nowait()
                if time.time() - ctime < sock_life:
                    return sock
                else:
                    sock.close()
                    continue
            except Queue.Empty:
                break

    start_time = time.time()
    while time.time() - start_time < self.max_timeout:
        if self.thread_num < self.max_thread_num:
            if host == "appengine.google.com":
                ip = google_ip.get_host_ip("*.google.com")
            else:
                ip = google_ip.get_gws_ip()
            if not ip:
                xlog.error("no gws ip.")
                return
            addr = (ip, port)
            self.thread_num_lock.acquire()
            self.thread_num += 1
            self.thread_num_lock.release()
            p = threading.Thread(target=_create_connection, args=(addr,))
            # BUG FIX: mark the worker as daemon (the sibling variant of
            # this method already does) so in-flight connect attempts
            # cannot keep the process alive at shutdown.
            p.daemon = True
            p.start()
        try:
            ctime, sock = self.tcp_connection_cache.get(timeout=0.2)
            return sock
        except Queue.Empty:
            # narrowed from bare `except:` — Queue.get(timeout=...) only
            # raises Queue.Empty here
            continue
    xlog.warning('create tcp connection fail.')
def handler(method, url, headers, body, wfile):
    """Forward one browser request through GAE and stream the response
    back to *wfile*.

    Retries fetch() for up to 30s, handling per-appid errors (404 gone,
    403/405 bad ip, 503 quota); 206 responses are delegated to
    RangeFetch.  Returns nothing; all output goes to wfile/logs.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  # overall retry time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                if response.app_status == 404:
                    # appid no longer exists: drop it and retry with another
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                        send_response(wfile, 404, body=html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        response.close()
                        continue

                if response.app_status == 403 or response.app_status == 405:  # Method not allowed
                    # google have changed from gws to gvs, need to remove.
                    xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                    # some ip can connect, and server type is gws
                    # but can't use as GAE server
                    # so we need remove it immediately
                    google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                    response.close()
                    continue

                if response.app_status == 503:
                    # appid over quota: drop it and retry with another
                    xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                        send_response(wfile, 503, body=html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        response.close()
                        continue

            if response.app_status < 500:
                break  # usable response; leave the retry loop

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        # Partial content: hand off to the multi-range fetcher.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))

        # Copy response headers, dropping hop-by-hop/skip entries.
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':  # http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away; keep draining GAE below but stop writing.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        if len(response.app_msg):
            # GAE-level error page: relay it verbatim and stop.
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(
                int(x)
                for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        # Stream the body; give up after 20s without data.
        last_read_time = time.time()
        while True:
            if start > end:
                # whole body relayed: recycle the SSL connection
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info("GAE t:%d s:%d %d %s", (time.time() - time_request) * 1000, length, response.status, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # non-blocking SSL asked for a retry of the same write
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """RangeFetch worker: pull (start, end, response) jobs from
    *range_queue*, download each byte range, and push (offset, data)
    chunks into *data_queue*.

    Failed ranges are re-queued as (start, end, None); the worker exits
    when self._stopped is set or no appid remains.
    """
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers['Connection'] = 'close'
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Throttle: if the writer is far behind this range and the
                # buffered backlog exceeds ~30MB, put the job back and wait.
                if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers['Range'] = 'bytes=%d-%d' % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                range_queue.put((start, end, None))
                continue

            if response.app_status != 200:
                # GAE-level failure: handle bad appids, then retry the range.
                xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])

                if response.app_status == 404:
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return

                if response.app_status == 503:
                    xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return

                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader('Location'):
                # Follow redirect for subsequent fetches, retry this range.
                self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                xlog.info('RangeFetch Redirect(%r)', self.url)
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader('Content-Range')
                if not content_range:
                    xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s', self.method, self.url, content_range, response.getheaders(), start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader('Content-Length', 0))
                xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                # Stream this range; bail out after 20s without data.
                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue

                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len

                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                        break

                if start < end + 1:
                    # incomplete: re-queue the remaining part of this range
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
            else:
                xlog.error('RangeFetch %r return %s', self.url, response.status)
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            xlog.exception('RangeFetch._fetchlet error:%s', e)
            raise
def handler(method, url, headers, body, wfile):
    """Forward one browser request through GAE and stream the response
    back to *wfile*.

    Retries fetch() for up to 30s.  On 404 this variant first checks the
    Server header: a non-gws server means the ip itself doesn't support
    GAE and is removed; otherwise the appid is treated as gone.  403/405
    removes the ip; 503 rotates the appid.  206 responses are delegated
    to RangeFetch.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  # overall retry time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                if response.app_status == 404:
                    # Distinguish "bad ip" from "appid gone" via Server header.
                    server_type = response.getheader("Server", "")
                    if "gws" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue

                    xlog.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html("404 No usable Appid Exists", u"没有可用appid了,请配置可用的appid")
                        send_response(wfile, 404, body=html.encode("utf-8"))
                        response.close()
                        return
                    else:
                        response.close()
                        continue

                if response.app_status == 403 or response.app_status == 405:  # Method not allowed
                    # google have changed from gws to gvs, need to remove.
                    xlog.warning("405 Method not allowed. remove %s ", response.ssl_sock.ip)
                    # some ip can connect, and server type is gws
                    # but can't use as GAE server
                    # so we need remove it immediately
                    google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                    response.close()
                    continue

                if response.app_status == 503:
                    # appid over quota: rotate to another appid
                    xlog.warning("APPID %r out of Quota, remove it.", response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html("503 No usable Appid Exists", u"appid流量不足,请增加appid")
                        send_response(wfile, 503, body=html.encode("utf-8"))
                        response.close()
                        return
                    else:
                        response.close()
                        continue

            if response.app_status < 500:
                break  # usable response; leave the retry loop

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception("gae_handler.handler %r %s , retry...", e, url)

    if response.status == 206:
        # Partial content: hand off to the multi-range fetcher.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))

        # Copy response headers, dropping hop-by-hop/skip entries.
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == "Transfer-Encoding":  # http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if "X-Head-Content-Length" in response_headers:
            if method == "HEAD":
                response_headers["Content-Length"] = response_headers["X-Head-Content-Length"]
            del response_headers["X-Head-Content-Length"]

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                # logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser went away; keep draining GAE below but stop writing.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        if len(response.app_msg):
            # GAE-level error page: relay it verbatim and stop.
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader("Content-Length", 0))
        content_range = response.getheader("Content-Range", "")
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r"bytes (\d+)-(\d+)/(\d+)", content_range).group(1, 2, 3)
            )
        else:
            start, end, length = 0, content_length - 1, content_length

        # Stream the body; give up after 20s without data.
        last_read_time = time.time()
        while True:
            if start > end:
                # whole body relayed: recycle the SSL connection
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info("GAE t:%d s:%d %d %s", (time.time() - time_request) * 1000, length, response.status, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    response.close()
                    xlog.warn(
                        "read timeout t:%d len:%d left:%d %s",
                        (time.time() - time_request) * 1000,
                        length,
                        (end - start),
                        url,
                    )
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # non-blocking SSL asked for a retry of the same write
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or "bad write retry" in repr(e_b):
                        xlog.warn("gae_handler send to browser return %r %r", e_b, url)
                    else:
                        xlog.warn("gae_handler send to browser return %r %r", e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or "bad write retry" in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def create_connection(self, host="", port=443, sock_life=5):
    """Return a connected TCP socket to a Google front-end IP.

    Workers are spawned (up to self.max_thread_num) that each connect to one
    candidate IP and push the resulting socket into self.tcp_connection_cache;
    this method waits up to self.max_timeout seconds for the first success.

    host:      "appengine.google.com" selects a *.google.com IP and bypasses
               the connection cache; anything else uses a gws IP and may
               return a cached, still-fresh socket.
    port:      only 443 is supported; anything else returns None.
    sock_life: max age in seconds for a cached socket to be reused.

    Returns a socket object, or None on failure/timeout.
    """
    if port != 443:
        xlog.warn("forward port %d not supported.", port)
        return None

    def _create_connection(ip_port, delay=0):
        # Worker thread body: connect one IP, report timing to google_ip,
        # and park the socket in tcp_connection_cache for the waiter.
        time.sleep(delay)
        ip = ip_port[0]
        sock = None
        start_time = time.time()
        conn_time = 0
        try:
            # create an ipv4/ipv6 socket object, via SOCKS proxy if configured
            if config.PROXY_ENABLE:
                sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            else:
                sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
            # set reuseaddr option to avoid 10048 socket error
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # resize socket recv buffer 8K->32K to improve browser related application performance
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
            # disable nagle algorithm to send http request quickly.
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
            # set a short timeout to trigger timeout retry more quickly.
            sock.settimeout(self.timeout)

            sock.connect(ip_port)

            # record TCP connection time and feed it back to the IP scorer
            conn_time = time.time() - start_time
            xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)
            google_ip.update_ip(ip, conn_time * 2000)

            # hand the connected socket to the waiting caller
            self.tcp_connection_cache.put((time.time(), sock))
        except Exception:
            conn_time = int((time.time() - start_time) * 1000)
            xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
            google_ip.report_connect_fail(ip)
            if sock:
                sock.close()
        finally:
            # always release this worker's slot, success or failure
            self.thread_num_lock.acquire()
            self.thread_num -= 1
            self.thread_num_lock.release()

    if host != "appengine.google.com":
        # Try to reuse a cached socket first; drop any that are older
        # than sock_life seconds.
        while True:
            try:
                ctime, sock = self.tcp_connection_cache.get_nowait()
                if time.time() - ctime < sock_life:
                    return sock
                else:
                    sock.close()
                    continue
            except Queue.Empty:
                break

    start_time = time.time()
    while time.time() - start_time < self.max_timeout:
        if self.thread_num < self.max_thread_num:
            if host == "appengine.google.com":
                ip = google_ip.get_host_ip("*.google.com")
            else:
                ip = google_ip.get_gws_ip()
            if not ip:
                xlog.error("no gws ip.")
                return

            addr = (ip, port)
            self.thread_num_lock.acquire()
            self.thread_num += 1
            self.thread_num_lock.release()
            p = threading.Thread(target=_create_connection, args=(addr, ))
            p.daemon = True
            p.start()

        try:
            ctime, sock = self.tcp_connection_cache.get(timeout=0.2)
            return sock
        except Queue.Empty:
            # BUG FIX: was a bare "except:", which silently swallowed
            # every exception (including SystemExit/KeyboardInterrupt in
            # Python 2). Only the Queue.get timeout is expected here, and
            # the cache-drain loop above already catches Queue.Empty.
            continue

    xlog.warning('create tcp connection fail.')
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """Worker loop for one RangeFetch thread.

    Pulls (start, end, response) tasks from range_queue, issues a ranged GAE
    fetch when no response is attached, and streams the body into data_queue
    as (offset, data) chunks. Failed or partial ranges are re-queued as
    (start, end, None) so another worker can retry the remainder.
    Runs until self._stopped is set or an unexpected error escapes.
    """
    # Fresh header dict per worker; Range is rewritten for each task.
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers["Connection"] = "close"
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Backpressure: if this range is far ahead of the byte the
                # writer expects next and ~30MB is already buffered, put the
                # task back and idle so memory stays bounded.
                if (
                    self.expect_begin < start
                    and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024
                ):
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers["Range"] = "bytes=%d-%d" % (start, end)
                # A task may carry a pre-existing response (the initial 206);
                # otherwise fetch this range now.
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                # Fetch failed; hand the range back for retry.
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue
            if not response:
                xlog.warning("RangeFetch %s return %r", headers["Range"], response)
                range_queue.put((start, end, None))
                continue
            # app_status is the GAE wrapper's status, distinct from the
            # origin server's response.status handled further below.
            if response.app_status != 200:
                xlog.warning(
                    'Range Fetch return %s "%s %s" %s ',
                    response.app_status,
                    self.method,
                    self.url,
                    headers["Range"],
                )
                if response.app_status == 404:
                    # Appid gone: rotate to another; stop all workers if none left.
                    xlog.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                if response.app_status == 503:
                    # Appid over quota: sideline it and rotate.
                    xlog.warning("APPID %r out of Quota, remove it temporary.", response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                response.close()
                range_queue.put((start, end, None))
                continue
            if response.getheader("Location"):
                # Follow redirect for subsequent range requests, then retry.
                self.url = urlparse.urljoin(self.url, response.getheader("Location"))
                xlog.info("RangeFetch Redirect(%r)", self.url)
                response.close()
                range_queue.put((start, end, None))
                continue
            if 200 <= response.status < 300:
                content_range = response.getheader("Content-Range")
                if not content_range:
                    # Server ignored our Range header; retry the whole span.
                    xlog.warning(
                        'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                        self.method,
                        self.url,
                        content_range,
                        response.getheaders(),
                        start,
                        end,
                    )
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader("Content-Length", 0))
                xlog.info(
                    ">>>>>>>>>>>>>>> [thread %s] %s %s",
                    threading.currentThread().ident,
                    content_length,
                    content_range,
                )
                # Stream the body; `start` advances past bytes delivered, so
                # on error the remainder can be re-queued precisely.
                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            # Empty read: give up after 20s of silence,
                            # otherwise poll again shortly.
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue
                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers["Range"], e)
                        break
                if start < end + 1:
                    # Short read: re-queue only the unfetched tail.
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                # Range fully delivered; recycle the SSL connection.
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info(">>>>>>>>>>>>>>> Successfully reached %d bytes.", start - 1)
            else:
                xlog.error("RangeFetch %r return %s", self.url, response.status)
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            # Unexpected failure: log with traceback and let it propagate
            # to kill this worker thread.
            xlog.exception("RangeFetch._fetchlet error:%s", e)
            raise