def keep_alive_thread(self):
    # Background worker: once a second, close idle sockets from the new
    # connection pool, HEAD-probe pooled GAE sockets (recycling live ones,
    # capping the pool size), then top the pool back up.
    # Runs for as long as self.keep_alive is truthy.
    while self.keep_alive:
        time.sleep(1)
        try:
            # Sockets close to the keep-alive deadline are simply dropped.
            sock_list = self.new_conn_pool.get_need_keep_alive(
                maxtime=self.keep_alive - 3)
            for ssl_sock in sock_list:
                ssl_sock.close()
            sock_list = self.gae_conn_pool.get_need_keep_alive(
                maxtime=self.keep_alive - 3)
            for ssl_sock in sock_list:
                # only keep little alive link.
                # if you have 25 appid, you can keep 5 alive link.
                if self.gae_conn_pool.qsize() > max(
                        1, len(appid_manager.working_appid_list) / 2):
                    ssl_sock.close()
                    continue
                #inactive_time = time.time() -ssl_sock.last_use_time
                #logging.debug("inactive_time:%d", inactive_time)
                # Probe the socket; keep it only if the app still answers.
                if self.head_request(ssl_sock):
                    self.save_ssl_connection_for_reuse(ssl_sock)
                else:
                    ssl_sock.close()
            self.create_more_connection()
        except Exception as e:
            xlog.warn("keep alive except:%r", e)
def save(self):
    """Persist user GAE-proxy overrides to data/gae_proxy/config.ini.

    Writes [gae] (only when an appid is set), [proxy], optional [hosts]
    overrides, and the [google_ip] values that differ from the shipped
    defaults.  Failures are logged, never raised (best effort).
    """
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))
    try:
        # Fix: use a with-statement so the file is closed even when a
        # write raises; the previous version leaked the handle on error.
        with open(CONFIG_USER_FILENAME, 'w') as f:
            if self.user_special.appid != "":
                f.write("[gae]\n")
                f.write("appid = %s\n" % self.user_special.appid)
                f.write("password = %s\n\n" % self.user_special.password)

            f.write("[proxy]\n")
            f.write("enable = %s\n" % self.user_special.proxy_enable)
            f.write("type = %s\n" % self.user_special.proxy_type)
            f.write("host = %s\n" % self.user_special.proxy_host)
            f.write("port = %s\n" % self.user_special.proxy_port)
            f.write("user = %s\n" % self.user_special.proxy_user)
            f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)

            if self.user_special.host_appengine_mode != "gae":
                f.write("[hosts]\n")
                f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
                f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)

            f.write("[google_ip]\n")
            # Only values that differ from the defaults are written, so
            # the user file stays minimal.
            if int(self.user_special.auto_adjust_scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num'):
                f.write("auto_adjust_scan_ip_thread_num = %d\n\n" % int(self.user_special.auto_adjust_scan_ip_thread_num))
            if int(self.user_special.scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num'):
                f.write("max_scan_ip_thread_num = %d\n\n" % int(self.user_special.scan_ip_thread_num))
            if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint('google_ip', 'use_ipv6'):
                f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))
    except:
        xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
def load_tasks():
    """Scan config.tasks_path for *.py task modules and initialise them.

    Returns:
        (tasks, intervals): the loaded task modules and their
        run_interval values, in load order.

    A module that fails to load or init is logged and skipped.
    """
    tasks = []
    intervals = []
    import imp
    tasks_path = config.tasks_path
    for fileName in os.listdir(tasks_path):
        if not fileName.lower().endswith('.py'):
            continue
        xlog.info('start load %s' % fileName)
        full_path = os.path.join(tasks_path, fileName)
        try:
            with open(full_path, 'rb') as fpy:
                # Bug fix: imp.load_source() expects the module's file
                # path as its second argument; the directory was passed
                # before, breaking the load.
                task = imp.load_source(
                    'Auto-tasks_%d_%s' % (len(tasks), fileName),
                    full_path, fpy)
            name = task.name
            interval = task.run_interval
            task.init()
            tasks.append(task)
            intervals.append(interval)
            xlog.info('load task %s success.' % name)
        except:
            xlog.warn('load %s fail.' % fileName)
    del imp
    return (tasks, intervals)
def test2(self):
    # Incrementally grow a cipher whitelist: start from AES128-SHA, and
    # for each remaining cipher append it tentatively; if a handshake
    # with the enlarged suite no longer reaches a "gws" server (or the
    # connection errors out), drop that cipher again.
    work_ciphers = ["AES128-SHA"]
    for cipher in self.cipher_list:
        if cipher in work_ciphers:
            continue
        else:
            work_ciphers.append(cipher)
        xlog.debug("%s", cipher)
        cipher_suites = (work_ciphers)
        openssl_context = SSLConnection.context_builder(
            ca_certs=g_cacertfile, cipher_suites=cipher_suites)
        try:
            ssl, _, _ = connect_ssl(self.ip, openssl_context=openssl_context)
            server_type = test_server_type(ssl, self.ip)
            xlog.debug("%s", server_type)
            if "gws" not in server_type:
                # adding this cipher broke gws reachability: back it out
                work_ciphers.remove(cipher)
        except Exception as e:
            xlog.warn("err:%s", e)
            try:
                work_ciphers.remove(cipher)
            except:
                pass
    # Log the surviving suite as a colon-separated list.
    work_str = ""
    for cipher in work_ciphers:
        work_str += cipher + ":"
    xlog.info("work ciphers:%s", work_str)
def remove_ip_process(self):
    # Worker draining the pending-removal queue: re-test each IP and
    # restore it if the appspot check now passes; if the network itself
    # looks down, requeue the IP and bail out.  Always decrements the
    # worker counter in the finally block.
    try:
        while connect_control.keep_running:
            try:
                ip_str = self.to_remove_ip_list.get_nowait()
            except:
                # queue drained -> nothing left to process
                break
            result = check_ip.test(ip_str)
            if result and result.appspot_ok:
                # IP works again; put it back instead of removing it.
                self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip_str)
                continue
            if not check_ip.network_is_ok():
                # Can't tell a bad IP from a dead network: requeue and stop.
                self.to_remove_ip_list.put(ip_str)
                xlog.warn("network is unreachable. check your network connection.")
                return
            xlog.info("real remove ip:%s ", ip_str)
            self.iplist_need_save = 1
    finally:
        self.remove_ip_thread_num_lock.acquire()
        self.remove_ip_thread_num -= 1
        self.remove_ip_thread_num_lock.release()
def remove_ip_process(self):
    # Variant of the removal worker that loops unconditionally (no
    # keep_running check): drain to_remove_ip_list, restoring IPs that
    # pass a re-test and aborting when the network seems down.  The
    # thread counter is decremented on every exit path.
    try:
        while True:
            try:
                ip_str = self.to_remove_ip_list.get_nowait()
            except:
                # queue empty
                break
            result = check_ip.test(ip_str)
            if result and result.appspot_ok:
                # still healthy: restore instead of removing
                self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip_str)
                continue
            if not check_ip.network_is_ok():
                # ambiguous failure: requeue the IP and exit the worker
                self.to_remove_ip_list.put(ip_str)
                xlog.warn("network is unreachable. check your network connection.")
                return
            xlog.info("real remove ip:%s ", ip_str)
            self.iplist_need_save = 1
    finally:
        self.remove_ip_thread_num_lock.acquire()
        self.remove_ip_thread_num -= 1
        self.remove_ip_thread_num_lock.release()
def do_POST(self):
    """Dispatch POST requests for the local web-control UI.

    Rejects cross-site requests (Referer must be 127.0.0.1 or
    localhost), parses the request body into self.postvars, then routes
    on the URL path; unknown paths get a 404.
    """
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        # Bug fix: "startswitch" raised AttributeError, which the bare
        # except below swallowed — silently disabling this check.
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        # missing/unparsable Referer: fall through and serve the request
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)

    try:
        # Parse the body according to its content type.
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def fetch(method, host, path, headers, payload, bufsize=8192):
    # Send one HTTP request over a pooled direct SSL connection and
    # return a begun httplib.HTTPResponse (body unread), or None when no
    # connection is available or the status line is malformed.
    request_data = '%s %s HTTP/1.1\r\n' % (method, path)
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'

    ssl_sock = https_manager.get_ssl_connection(host)
    if not ssl_sock:
        # no usable connection; caller sees None
        return

    ssl_sock.send(request_data.encode())
    payload_len = len(payload)
    start = 0
    while start < payload_len:
        # send() may accept fewer bytes than requested; cap chunks at 64K-1
        send_size = min(payload_len - start, 65535)
        sended = ssl_sock.send(payload[start:start+send_size])
        start += sended

    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    # keep the socket with the response so the caller can recycle it
    response.ssl_sock = ssl_sock
    try:
        orig_timeout = ssl_sock.gettimeout()
        ssl_sock.settimeout(90)
        response.begin()
        ssl_sock.settimeout(orig_timeout)
    except httplib.BadStatusLine as e:
        xlog.warn("direct_handler.fetch bad status line:%r", e)
        response = None
    except Exception as e:
        # NOTE(review): unlike BadStatusLine, other errors still return
        # the (possibly half-begun) response object — confirm intended.
        xlog.warn("direct_handler.fetch:%r", e)
    return response
def test(ip_str, loop=1):
    # Full appengine check for one IP: run the app HEAD check `loop`
    # times, record appspot_ok, then determine the server type via a
    # CA-checked handshake.  Returns the populated check.result.
    xlog.info("==>%s", ip_str)
    check = Check_frame(ip_str, check_cert=False)
    for i in range(loop):
        result = check.check(callback=test_app_head)
        if not result:
            if "gws" in check.result.server_type:
                # handshake reached gws, yet the app check failed
                xlog.warn("ip:%s server_type:%s but appengine check fail.",
                          ip_str, check.result.server_type)
            xlog.warn("check fail")
            #continue
        else:
            xlog.debug("=======app check ok: %s", ip_str)
        check.result.appspot_ok = result
        # Second pass: identify the server type with CA verification on.
        result = check.check(callback=test_server_type, check_ca=True)
        if not result:
            xlog.debug("test server type fail")
            continue
        check.result.server_type = result
        xlog.info("========== %s type:%s domain:%s handshake:%d",
                  ip_str, check.result.server_type, check.result.domain,
                  check.result.handshake_time)
    return check.result
def load_ip_range(self):
    # Parse the IP-range file into parallel lookup structures:
    #   ip_range_map        : cumulative offset -> [begin, end]
    #   ip_range_list       : [begin, end] pairs in file order
    #   ip_range_index      : sorted cumulative offsets (for bisecting)
    #   candidate_amount_ip : total number of candidate addresses
    self.ip_range_map = {}
    self.ip_range_list = []
    self.ip_range_index = []
    self.candidate_amount_ip = 0

    content = self.load_range_content()
    lines = content.splitlines()
    for line in lines:
        if len(line) == 0 or line[0] == '#':
            # skip blank lines and comments
            continue
        try:
            begin, end = ip_utils.split_ip(line)
            nbegin = ip_utils.ip_string_to_num(begin)
            nend = ip_utils.ip_string_to_num(end)
            if not nbegin or not nend or nend < nbegin:
                # NOTE(review): `not nbegin` also rejects address 0 —
                # confirm 0.0.0.0 is never a legitimate range start.
                xlog.warn("load ip range:%s fail", line)
                continue
        except Exception as e:
            xlog.exception("load ip range:%s fail:%r", line, e)
            continue

        self.ip_range_map[self.candidate_amount_ip] = [nbegin, nend]
        self.ip_range_list.append( [nbegin, nend] )
        self.ip_range_index.append(self.candidate_amount_ip)
        num = nend - nbegin
        self.candidate_amount_ip += num
        # print ip_utils.ip_num_to_string(nbegin), ip_utils.ip_num_to_string(nend), num

    self.ip_range_index.sort()
def load_ip_range(self):
    """Rebuild the candidate IP-range tables from the range file.

    Populates ip_range_map (cumulative offset -> [begin, end]),
    ip_range_list, the sorted ip_range_index, and the total
    candidate_amount_ip counter.  Malformed lines are logged and
    skipped.
    """
    self.ip_range_map = {}
    self.ip_range_list = []
    self.ip_range_index = []
    self.candidate_amount_ip = 0

    for line in self.load_range_content().splitlines():
        # ignore blanks and comment lines
        if not line or line.startswith('#'):
            continue

        try:
            first, last = ip_utils.split_ip(line)
            lo = ip_utils.ip_string_to_num(first)
            hi = ip_utils.ip_string_to_num(last)
            if (not lo) or (not hi) or hi < lo:
                xlog.warn("load ip range:%s fail", line)
                continue
        except Exception as e:
            xlog.exception("load ip range:%s fail:%r", line, e)
            continue

        offset = self.candidate_amount_ip
        self.ip_range_map[offset] = [lo, hi]
        self.ip_range_list.append([lo, hi])
        self.ip_range_index.append(offset)
        self.candidate_amount_ip = offset + (hi - lo)

    self.ip_range_index.sort()
def check_response(self, response):
    """Validate a check-IP HTTP response.

    Accepts only statuses listed in config.check_ip_accept_status.
    A 503 (out of quota) still counts as success when the Server header
    looks like a Google frontend; otherwise the body must contain
    config.check_ip_content.  Returns True/False.
    """
    server_type = response.headers.get('Server', "")
    self.logger.debug("status:%d", response.status)
    self.logger.debug("Server type:%s", server_type)

    if response.status not in self.config.check_ip_accept_status:
        return False

    if response.status == 503:
        # out of quota
        # Bug fix: `b"gws" in server_type` raised TypeError on Python 3
        # whenever the header was a str (including the "" default);
        # normalise to str before the substring checks.
        if isinstance(server_type, bytes):
            server_type = server_type.decode("utf-8", "replace")
        if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
            # use self.logger like the rest of this method (was xlog)
            self.logger.warn("503 but server type:%s", server_type)
            return False
        else:
            return True

    try:
        content = response.read()
    except Exception as e:
        # Connection-level errors on Py3 are expected noise; anything
        # else is logged before failing the check.
        if sys.version_info[0] == 3 and (
                isinstance(e, ConnectionError) or
                isinstance(e, ConnectionResetError) or
                isinstance(e, BrokenPipeError)):
            return False
        self.logger.warn("app check except:%r", e)
        return False

    if self.config.check_ip_content not in content:
        self.logger.warn("app check content:%s", content)
        return False

    return True
def load(self):
    # Load user overrides from data/gae_proxy/config.ini on top of the
    # defaults in proxy.ini.  Each optional value is read best-effort:
    # a missing section/option simply keeps the current value.
    # Relax ConfigParser's option regex so only '=' (not ':') separates
    # keys from values, allowing ':' inside option names (hostnames).
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
    self.USER_CONFIG = ConfigParser.ConfigParser()
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))
    self.DEFAULT_CONFIG = ConfigParser.ConfigParser()
    DEFAULT_CONFIG_FILENAME = os.path.abspath(
        os.path.join(current_path, 'proxy.ini'))
    try:
        if os.path.isfile(CONFIG_USER_FILENAME):
            self.USER_CONFIG.read(CONFIG_USER_FILENAME)
        else:
            # no user config yet: nothing to load
            return
        if os.path.isfile(DEFAULT_CONFIG_FILENAME):
            self.DEFAULT_CONFIG.read(DEFAULT_CONFIG_FILENAME)
        else:
            return
        try:
            self.user_special.appid = self.USER_CONFIG.get('gae', 'appid')
            self.user_special.password = self.USER_CONFIG.get('gae', 'password')
        except:
            pass
        try:
            self.user_special.host_appengine_mode = self.USER_CONFIG.get('hosts', 'appengine.google.com')
        except:
            pass
        try:
            # NOTE(review): these read the live `config.CONFIG`, not
            # USER_CONFIG — confirm that is intended.
            self.user_special.ip_connect_interval = config.CONFIG.getint('google_ip', 'ip_connect_interval')
        except:
            pass
        try:
            self.user_special.scan_ip_thread_num = config.CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
        except:
            # fall back to the shipped default
            self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
        try:
            self.user_special.auto_adjust_scan_ip_thread_num = config.CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num')
        except:
            pass
        try:
            self.user_special.use_ipv6 = config.CONFIG.getint('google_ip', 'use_ipv6')
        except:
            pass
        # [proxy] options are read unguarded: a missing one falls
        # through to the outer handler below.
        self.user_special.proxy_enable = self.USER_CONFIG.get('proxy', 'enable')
        self.user_special.proxy_type = self.USER_CONFIG.get('proxy', 'type')
        self.user_special.proxy_host = self.USER_CONFIG.get('proxy', 'host')
        self.user_special.proxy_port = self.USER_CONFIG.get('proxy', 'port')
        self.user_special.proxy_user = self.USER_CONFIG.get('proxy', 'user')
        self.user_special.proxy_passwd = self.USER_CONFIG.get('proxy', 'passwd')
    except Exception as e:
        xlog.warn("User_config.load except:%s", e)
def test2(self):
    """Grow a working cipher whitelist starting from AES128-SHA.

    Each remaining cipher is appended tentatively; if a handshake with
    the enlarged suite no longer reaches a gws server (or errors out),
    the cipher is removed again.  Logs the final colon-separated list.
    """
    work_ciphers = ["AES128-SHA"]
    for candidate in self.cipher_list:
        if candidate in work_ciphers:
            continue
        work_ciphers.append(candidate)
        xlog.debug("%s", candidate)
        ctx = SSLConnection.context_builder(ca_certs=g_cacertfile, cipher_suites=work_ciphers)
        try:
            ssl, _, _ = connect_ssl(self.ip, openssl_context=ctx)
            kind = test_server_type(ssl, self.ip)
            xlog.debug("%s", kind)
            if "gws" not in kind:
                # this cipher broke gws reachability: back it out
                work_ciphers.remove(candidate)
        except Exception as e:
            xlog.warn("err:%s", e)
            if candidate in work_ciphers:
                work_ciphers.remove(candidate)
    summary = ""
    for name in work_ciphers:
        summary += name + ":"
    xlog.info("work ciphers:%s", summary)
def _request(self, method, host, path="/", headers={}, data="", timeout=40): try: response = self.http_dispatcher.request(method, host, path, dict(headers), data, timeout=timeout) status = response.status if status != 200: xlog.warn("front request %s %s%s fail, status:%d", method, host, path, status) content = response.task.read_all() # xlog.debug("%s %s%s trace:%s", method, response.ssl_sock.host, path, response.task.get_trace()) return content, status, response except Exception as e: xlog.exception("front request %s %s%s fail:%r", method, host, path, e) return "", 500, {}
def request(headers=None, payload=None):
    """Send a GAE-fronted request, retrying over fresh connections.

    Picks an appid for brand-new connections, sets the Host header to
    match, and returns the response with .ssl_sock attached.  Raises
    GAE_Exception(2, ...) after max_retry failed attempts.
    """
    # Bug fix: the old signature used a mutable default (headers={})
    # and then mutated it (headers['Host'] = ...), leaking the Host
    # header across calls that omitted the argument.
    if headers is None:
        headers = {}
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                # fresh connection: bind it to an appid
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    # caught by the except below, logged, and retried
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.warn('request failed:%s', e)
            if ssl_sock:
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
def request(headers=None, payload=None):
    """Retry wrapper around _request over pooled GAE connections.

    Ensures a Host header matching the connection's appid, attaches the
    socket to the returned response, and raises GAE_Exception when no
    appid exists or all retries fail.
    """
    # Bug fix: headers defaulted to a shared mutable dict that this
    # function then mutated (headers["Host"] = ...), leaking state
    # across calls which omitted the argument.
    if headers is None:
        headers = {}
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug("create_ssl_connection fail")
                continue

            if ssl_sock.host == "":
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    # logged and retried by the handler below
                    raise GAE_Exception(1, "no appid can use")
                headers["Host"] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers["Host"]
            else:
                headers["Host"] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.warn("request failed:%s", e)
            if ssl_sock:
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
def keep_alive_thread(self):
    """Periodic connection maintenance loop.

    Once a second while self.keep_alive is truthy: drop not-yet-used
    sockets nearing the keep-alive deadline, HEAD-probe pooled GAE
    sockets (recycling live ones, capping pool size), then request
    more connections.  Errors are logged and the loop continues.
    """
    while self.keep_alive:
        time.sleep(1)
        try:
            for sock in self.new_conn_pool.get_need_keep_alive(maxtime=self.keep_alive - 3):
                sock.close()

            for sock in self.gae_conn_pool.get_need_keep_alive(maxtime=self.keep_alive - 3):
                # Keep only a small number of live links relative to the
                # number of working appids (e.g. 25 appids -> ~12 cap).
                if self.gae_conn_pool.qsize() > max(1, len(appid_manager.working_appid_list) / 2):
                    sock.close()
                    continue
                if self.head_request(sock):
                    self.save_ssl_connection_for_reuse(sock)
                else:
                    sock.close()

            self.create_more_connection()
        except Exception as e:
            xlog.warn("keep alive except:%r", e)
def check_win10():
    """Return True iff running on Windows 10 (via ntdll RtlGetVersion).

    Non-Windows platforms return False immediately; version-query
    failures are logged and also return False.
    """
    if sys.platform != "win32":
        return False

    import ctypes

    class OSVERSIONINFOEXW(ctypes.Structure):
        _fields_ = [
            ('dwOSVersionInfoSize', ctypes.c_ulong),
            ('dwMajorVersion', ctypes.c_ulong),
            ('dwMinorVersion', ctypes.c_ulong),
            ('dwBuildNumber', ctypes.c_ulong),
            ('dwPlatformId', ctypes.c_ulong),
            ('szCSDVersion', ctypes.c_wchar * 128),
            ('wServicePackMajor', ctypes.c_ushort),
            ('wServicePackMinor', ctypes.c_ushort),
            ('wSuiteMask', ctypes.c_ushort),
            ('wProductType', ctypes.c_byte),
            ('wReserved', ctypes.c_byte),
        ]

    info = OSVERSIONINFOEXW()
    info.dwOSVersionInfoSize = ctypes.sizeof(info)
    # RtlGetVersion is not subject to the GetVersionEx compatibility shims.
    if ctypes.windll.Ntdll.RtlGetVersion(ctypes.byref(info)) != 0:
        xlog.warn("Failed to get win32 OS version")
        return False

    if info.dwMajorVersion == 10:
        xlog.info("detect Win10, enable connect concurent control.")
        return True
    return False
def check_win10():
    # Detect Windows 10 by calling ntdll's RtlGetVersion (which is not
    # affected by the GetVersionEx compatibility shims).  Returns True
    # only on Win10; False on other platforms or on query failure.
    if sys.platform != "win32":
        return False

    import ctypes

    # Mirror of the native OSVERSIONINFOEXW structure.
    class OSVERSIONINFOEXW(ctypes.Structure):
        _fields_ = [
            ("dwOSVersionInfoSize", ctypes.c_ulong),
            ("dwMajorVersion", ctypes.c_ulong),
            ("dwMinorVersion", ctypes.c_ulong),
            ("dwBuildNumber", ctypes.c_ulong),
            ("dwPlatformId", ctypes.c_ulong),
            ("szCSDVersion", ctypes.c_wchar * 128),
            ("wServicePackMajor", ctypes.c_ushort),
            ("wServicePackMinor", ctypes.c_ushort),
            ("wSuiteMask", ctypes.c_ushort),
            ("wProductType", ctypes.c_byte),
            ("wReserved", ctypes.c_byte),
        ]

    os_version = OSVERSIONINFOEXW()
    os_version.dwOSVersionInfoSize = ctypes.sizeof(os_version)
    retcode = ctypes.windll.Ntdll.RtlGetVersion(ctypes.byref(os_version))
    if retcode != 0:
        xlog.warn("Failed to get win32 OS version")
        return False

    if os_version.dwMajorVersion == 10:
        xlog.info("detect Win10, enable connect concurent control.")
        return True
    return False
def fetch(method, host, path, headers, payload, bufsize=8192):
    """Send one request over a pooled direct connection to `host`.

    Returns a begun httplib.HTTPResponse carrying its ssl_sock (so the
    caller can recycle the socket), or None when no connection could be
    obtained or the server's status line was malformed.
    """
    header_lines = ["%s %s HTTP/1.1\r\n" % (method, path)]
    for k, v in headers.items():
        header_lines.append("%s: %s\r\n" % (k, v))
    header_lines.append("\r\n")
    request_data = "".join(header_lines)

    ssl_sock = https_manager.get_ssl_connection(host)
    if not ssl_sock:
        return

    ssl_sock.send(request_data.encode())

    # Stream the payload in <=65535-byte slices; send() may be partial.
    total = len(payload)
    pos = 0
    while pos < total:
        chunk = payload[pos:pos + min(total - pos, 65535)]
        pos += ssl_sock.send(chunk)

    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    response.ssl_sock = ssl_sock
    try:
        saved_timeout = ssl_sock.gettimeout()
        ssl_sock.settimeout(90)
        response.begin()
        ssl_sock.settimeout(saved_timeout)
    except httplib.BadStatusLine as e:
        xlog.warn("direct_handler.fetch bad status line:%r", e)
        response = None
    except Exception as e:
        xlog.warn("direct_handler.fetch:%r", e)
    return response
def load(self):
    # Load defaults from proxy.ini first (seeding scan_ip_thread_num),
    # then apply user overrides from data/gae_proxy/config.ini.  Each
    # optional value is read best-effort: a missing section/option
    # keeps the current value.
    # Restrict ConfigParser's option regex to '=' separators only, so
    # ':' may appear inside option names (hostnames).
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r"(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$")
    self.DEFAULT_CONFIG = ConfigParser.ConfigParser()
    DEFAULT_CONFIG_FILENAME = os.path.abspath(os.path.join(current_path, "proxy.ini"))
    self.USER_CONFIG = ConfigParser.ConfigParser()
    CONFIG_USER_FILENAME = os.path.abspath(os.path.join(root_path, "data", "gae_proxy", "config.ini"))
    try:
        if os.path.isfile(DEFAULT_CONFIG_FILENAME):
            self.DEFAULT_CONFIG.read(DEFAULT_CONFIG_FILENAME)
            # seed from defaults before user overrides are applied
            self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint("google_ip", "max_scan_ip_thread_num")
        else:
            return
        if os.path.isfile(CONFIG_USER_FILENAME):
            self.USER_CONFIG.read(CONFIG_USER_FILENAME)
        else:
            # no user config yet: keep the defaults
            return
        try:
            self.user_special.appid = self.USER_CONFIG.get("gae", "appid")
            self.user_special.password = self.USER_CONFIG.get("gae", "password")
        except:
            pass
        try:
            self.user_special.host_appengine_mode = self.USER_CONFIG.get("hosts", "appengine.google.com")
        except:
            pass
        try:
            # NOTE(review): reads the live `config.CONFIG`, not
            # USER_CONFIG — confirm that is intended.
            self.user_special.scan_ip_thread_num = config.CONFIG.getint("google_ip", "max_scan_ip_thread_num")
        except:
            self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint("google_ip", "max_scan_ip_thread_num")
        try:
            self.user_special.auto_adjust_scan_ip_thread_num = config.CONFIG.getint(
                "google_ip", "auto_adjust_scan_ip_thread_num"
            )
        except:
            pass
        try:
            self.user_special.use_ipv6 = config.CONFIG.getint("google_ip", "use_ipv6")
        except:
            pass
        # [proxy] options are read unguarded; a missing one falls
        # through to the outer handler below.
        self.user_special.proxy_enable = self.USER_CONFIG.get("proxy", "enable")
        self.user_special.proxy_type = self.USER_CONFIG.get("proxy", "type")
        self.user_special.proxy_host = self.USER_CONFIG.get("proxy", "host")
        self.user_special.proxy_port = self.USER_CONFIG.get("proxy", "port")
        self.user_special.proxy_user = self.USER_CONFIG.get("proxy", "user")
        self.user_special.proxy_passwd = self.USER_CONFIG.get("proxy", "passwd")
    except Exception as e:
        xlog.warn("User_config.load except:%s", e)
def save(self):
    """Persist user GAE-proxy overrides to data/gae_proxy/config.ini.

    Writes [gae] (only when an appid is set), [proxy], optional [hosts]
    overrides, and the [google_ip]/[connect_manager] values that differ
    from the shipped defaults.  Failures are logged, never raised.
    """
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, 'data', 'gae_proxy', 'config.ini'))
    try:
        # Fix: with-statement closes the file even when a write raises;
        # the previous version leaked the handle on error.
        with open(CONFIG_USER_FILENAME, 'w') as f:
            if self.user_special.appid != "":
                f.write("[gae]\n")
                f.write("appid = %s\n" % self.user_special.appid)
                f.write("password = %s\n\n" % self.user_special.password)

            f.write("[proxy]\n")
            f.write("enable = %s\n" % self.user_special.proxy_enable)
            f.write("type = %s\n" % self.user_special.proxy_type)
            f.write("host = %s\n" % self.user_special.proxy_host)
            f.write("port = %s\n" % self.user_special.proxy_port)
            f.write("user = %s\n" % self.user_special.proxy_user)
            f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)

            if self.user_special.host_appengine_mode != "gae":
                f.write("[hosts]\n")
                f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
                f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)

            f.write("[google_ip]\n")
            # Only settings that differ from the defaults are written,
            # keeping the user file minimal.
            if self.user_special.ip_connect_interval != self.DEFAULT_CONFIG.getint('google_ip', 'ip_connect_interval'):
                f.write("ip_connect_interval = %d\n" % int(self.user_special.ip_connect_interval))
            if int(self.user_special.auto_adjust_scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num'):
                f.write("auto_adjust_scan_ip_thread_num = %d\n\n" % int(self.user_special.auto_adjust_scan_ip_thread_num))
            if int(self.user_special.scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num'):
                f.write("max_scan_ip_thread_num = %d\n\n" % int(self.user_special.scan_ip_thread_num))
            if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint('google_ip', 'use_ipv6'):
                f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))

            f.write("[connect_manager]\n")
            if int(self.user_special.connect_interval) != self.DEFAULT_CONFIG.getint('connect_manager', 'connect_interval'):
                f.write("connect_interval = %d\n\n" % int(self.user_special.connect_interval))
    except:
        xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
def request(self, method, host, schema="http", path="/", headers={}, data="", timeout=40):
    # Tunnel one request through the heroku front: the real request
    # line + headers + body are packed (length-prefixed) into the body
    # of a POST to /2/ on an x-server, and the reply is unpacked into
    # (body, status, response).  Success/failure counters are updated.
    # change top domain to xx-net.net
    # this domain bypass the cloudflare front for ipv4
    #p = host.find(".")
    #host_sub = host[:p]
    #host = host_sub + ".xx-net.net"
    schema = "http"
    # force schema to http, avoid cert fail on heroku curl.
    # and all x-server provide ipv4 access
    url = schema + "://" + host + path

    payloads = ['%s %s HTTP/1.1\r\n' % (method, url)]
    for k in headers:
        v = headers[k]
        payloads.append('%s: %s\r\n' % (k, v))
    head_payload = "".join(payloads)

    # Frame layout: !H head-length, head, !I body-length, body.
    request_body = '%s%s%s%s' % \
        ((struct.pack('!H', len(head_payload)), head_payload,
          struct.pack('!I', len(data)), data))
    request_headers = {'Content-Length': len(data), 'Content-Type': 'application/octet-stream'}

    # NOTE(review): heroku_host is sent empty — presumably _request /
    # the dispatcher picks the actual front host; confirm.
    heroku_host = ""
    content, status, response = self._request(
        "POST", heroku_host, "/2/", request_headers, request_body, timeout)
    # xlog.info('%s "PHP %s %s %s" %s %s', handler.address_string(), handler.command, url, handler.protocol_version, response.status, response.getheader('Content-Length', '-'))
    # xlog.debug("status:%d", status)
    if status == 200:
        xlog.debug("%s %s%s trace:%s", method, host, path, response.task.get_trace())
        self.last_success_time = time.time()
        self.continue_fail_num = 0
        self.success_num += 1
    else:
        if status == 404:
            # server not exist on heroku: drop it from the host pool
            heroku_host = response.ssl_sock.host
            xlog.warn("heroku:%s fail", heroku_host)
            try:
                self.host_manager.remove(heroku_host)
            except:
                pass
        self.last_fail_time = time.time()
        self.continue_fail_num += 1
        self.fail_num += 1

    try:
        res = simple_http_client.TxtResponse(content)
    except:
        # unparsable reply: report 501 with empty body
        return "", 501, {}
    res.worker = response.worker
    res.task = response.task
    return res.body, res.status, res
def runJob(self):
    """Scan worker: pull candidate IPs from ip_range and check them.

    Runs until self.check_num reaches the 1,000,000 cap.  Individual
    failures are logged and do not stop the loop; a 1s sleep paces
    each candidate.
    """
    while self.check_num < 1000000:
        try:
            time.sleep(1)
            #ip_int = ip_range.random_get_ip()
            candidate = ip_range.get_ip()
            self.check_ip(ip_utils.ip_num_to_string(candidate))
        except Exception as e:
            xlog.warn("google_ip.runJob fail:%s", e)
def req_deploy_handler(self):
    """Handle /deploy POSTs: start, cancel, or poll the uploader.

    cmd=deploy spawns uploader.py with the posted credentials (refused
    when a deploy is already running); cmd=cancel kills a running
    deploy; cmd=get_log returns the current status and upload.log
    contents as JSON.
    """
    global deploy_proc
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''
    log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'upload.log'))
    time_now = datetime.datetime.today().strftime('%H:%M:%S-%a/%d/%b/%Y')
    if reqs['cmd'] == ['deploy']:
        appid = self.postvars['appid'][0]
        # Idiom fix throughout: compare to None with "is", not "==".
        if deploy_proc and deploy_proc.poll() is None:
            xlog.warn("deploy is running, request denied.")
            data = '{"res":"deploy is running", "time":"%s"}' % (time_now)
        else:
            try:
                # start fresh: clear any stale log from a previous run
                if os.path.isfile(log_path):
                    os.remove(log_path)
                script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'uploader.py'))
                email = self.postvars['email'][0]
                passwd = self.postvars['passwd'][0]
                rc4_passwd = self.postvars['rc4_passwd'][0]
                deploy_proc = subprocess.Popen([sys.executable, script_path, appid, email, passwd, rc4_passwd])
                xlog.info("deploy begin.")
                data = '{"res":"success", "time":"%s"}' % time_now
            except Exception as e:
                data = '{"res":"%s", "time":"%s"}' % (e, time_now)
    elif reqs['cmd'] == ['cancel']:
        if deploy_proc and deploy_proc.poll() is None:
            deploy_proc.kill()
            data = '{"res":"deploy is killed", "time":"%s"}' % (time_now)
        else:
            data = '{"res":"deploy is not running", "time":"%s"}' % (time_now)
    elif reqs['cmd'] == ['get_log']:
        if deploy_proc and os.path.isfile(log_path):
            with open(log_path, "r") as f:
                content = f.read()
        else:
            content = ""
        status = 'init'
        if deploy_proc:
            if deploy_proc.poll() is None:
                status = 'running'
            else:
                status = 'finished'
        data = json.dumps({'status': status, 'log': content, 'time': time_now})
    self.send_response('text/html', data)
def req_deploy_handler(self):
    # Web-control endpoint for the GAE uploader.  Routed on the "cmd"
    # query parameter:
    #   deploy  - spawn uploader.py with the posted credentials
    #             (refused while a deploy is already running)
    #   cancel  - kill a running deploy
    #   get_log - report uploader status plus upload.log contents
    # Responds with a small JSON payload in every case.
    global deploy_proc
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ""
    log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", "upload.log"))
    time_now = datetime.datetime.today().strftime("%H:%M:%S-%a/%d/%b/%Y")
    if reqs["cmd"] == ["deploy"]:
        appid = self.postvars["appid"][0]
        # poll() is None while the child is still running
        if deploy_proc and deploy_proc.poll() == None:
            xlog.warn("deploy is running, request denied.")
            data = '{"res":"deploy is running", "time":"%s"}' % (time_now)
        else:
            try:
                # clear any stale log from a previous run
                if os.path.isfile(log_path):
                    os.remove(log_path)
                script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", "uploader.py"))
                email = self.postvars["email"][0]
                passwd = self.postvars["passwd"][0]
                rc4_passwd = self.postvars["rc4_passwd"][0]
                deploy_proc = subprocess.Popen([sys.executable, script_path, appid, email, passwd, rc4_passwd])
                xlog.info("deploy begin.")
                data = '{"res":"success", "time":"%s"}' % time_now
            except Exception as e:
                data = '{"res":"%s", "time":"%s"}' % (e, time_now)
    elif reqs["cmd"] == ["cancel"]:
        if deploy_proc and deploy_proc.poll() == None:
            deploy_proc.kill()
            data = '{"res":"deploy is killed", "time":"%s"}' % (time_now)
        else:
            data = '{"res":"deploy is not running", "time":"%s"}' % (time_now)
    elif reqs["cmd"] == ["get_log"]:
        if deploy_proc and os.path.isfile(log_path):
            with open(log_path, "r") as f:
                content = f.read()
        else:
            content = ""
        status = "init"
        if deploy_proc:
            if deploy_proc.poll() == None:
                status = "running"
            else:
                status = "finished"
        data = json.dumps({"status": status, "log": content, "time": time_now})
    self.send_response("text/html", data)
def check(self, callback=None, check_ca=True, close_ssl=True):
    # SSL-connect to self.ip, optionally record the certificate CN,
    # then hand the live socket to `callback`.  Returns the callback's
    # result (or True when no callback); False on any handled failure.
    # The socket is closed in the finally block unless close_ssl=False.
    ssl_sock = None
    try:
        ssl_sock, self.result.connct_time, self.result.handshake_time = connect_ssl(
            self.ip, timeout=self.timeout, openssl_context=self.openssl_context)

        # verify SSL certificate issuer.
        def check_ssl_cert(ssl_sock):
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #raise HoneypotError(' certficate is none')
                raise SSLError("no cert")
            # NOTE(review): issuer_commonname is computed but unused here
            # — confirm the issuer check was removed deliberately.
            issuer_commonname = next(
                (v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            ssl_cert = cert_util.SSLCert(cert)
            xlog.info("%s CN:%s", self.ip, ssl_cert.cn)
            self.result.domain = ssl_cert.cn
        if check_ca:
            check_ssl_cert(ssl_sock)

        if callback:
            return callback(ssl_sock, self.ip)
        return True
    except SSLError as e:
        xlog.debug("Check_appengine %s SSLError:%s", self.ip, e)
        pass
    except IOError as e:
        xlog.warn("Check %s IOError:%s", self.ip, e)
        pass
    except httplib.BadStatusLine:
        #logging.debug('Check_appengine http.bad status line ip:%s', ip)
        #import traceback
        #traceback.print_exc()
        pass
    except Exception as e:
        if len(e.args) > 0:
            errno_str = e.args[0]
        else:
            errno_str = e.message
        xlog.exception('check_appengine %s %s err:%s', self.ip, errno_str, e)
    finally:
        if ssl_sock and close_ssl:
            ssl_sock.close()
    return False
def test(self):
    """Handshake with each cipher individually and log the server type.

    Connection failures are logged per cipher and do not stop the scan.
    """
    for suite in self.cipher_list:
        xlog.debug("%s", suite)
        ctx = SSLConnection.context_builder(ca_certs=g_cacertfile, cipher_suites=(suite,))
        try:
            ssl, _, _ = connect_ssl(self.ip, openssl_context=ctx)
            kind = test_server_type(ssl, self.ip)
            xlog.debug("%s", kind)
        except Exception as e:
            xlog.warn("err:%s", e)
def check_all_domain(check_ip):
    # Probe every front domain listed in front_domains.json (each
    # prefixed with "scan1.") and log ok/fail per host.
    # NOTE(review): `ip` and `wait_time` are not parameters — they are
    # presumably module-level globals; confirm both are defined before
    # this is called.
    with open(os.path.join(current_path, "front_domains.json"), "r") as fd:
        content = fd.read()
    cs = json.loads(content)
    for host in cs:
        host = "scan1." + host
        res = check_ip.check_ip(ip, host=host, wait_time=wait_time)
        if not res or not res.ok:
            xlog.warn("host:%s fail", host)
        else:
            xlog.info("host:%s ok", host)
def test_gws(ip_str):
    """Check that ip_str serves Google gws.

    Runs a CA-verified handshake plus a server-type probe; returns the
    populated check.result on success, False when the IP is not a gws
    front.
    """
    xlog.info("==>%s", ip_str)
    check = Check_frame(ip_str)
    result = check.check(callback=test_server_type, check_ca=True)
    # Idiom fix: `x not in y` instead of `not x in y` (same semantics).
    if not result or "gws" not in result:
        xlog.warn("Server:%s not gws", result)
        return False
    check.result.server_type = result
    return check.result
def check(self, callback=None, check_ca=True, close_ssl=True):
    # SSL-connect to self.ip, optionally verify the certificate issuer
    # (honeypot detection when self.check_cert is set), then run
    # `callback` on the live socket.  Returns the callback's result (or
    # True); False on any handled failure.  HoneypotError propagates so
    # callers can blacklist the IP.  Socket is closed unless
    # close_ssl=False.
    ssl_sock = None
    try:
        ssl_sock,self.result.connct_time,self.result.handshake_time = connect_ssl(self.ip, timeout=self.timeout, openssl_context=self.openssl_context)

        # verify SSL certificate issuer.
        def check_ssl_cert(ssl_sock):
            cert = ssl_sock.get_peer_certificate()
            if not cert:
                #raise HoneypotError(' certficate is none')
                raise SSLError("no cert")
            issuer_commonname = next((v for k, v in cert.get_issuer().get_components() if k == 'CN'), '')
            # a non-Google issuer strongly suggests an interception box
            if self.check_cert and not issuer_commonname.startswith('Google'):
                raise HoneypotError(' certficate is issued by %r, not Google' % ( issuer_commonname))
            ssl_cert = cert_util.SSLCert(cert)
            xlog.info("%s CN:%s", self.ip, ssl_cert.cn)
            self.result.domain = ssl_cert.cn
        if check_ca:
            check_ssl_cert(ssl_sock)

        if callback:
            return callback(ssl_sock, self.ip)
        return True
    except HoneypotError as e:
        # fatal: re-raise so the caller can blacklist this IP
        xlog.warn("honeypot %s", self.ip)
        raise e
    except SSLError as e:
        xlog.debug("Check_appengine %s SSLError:%s", self.ip, e)
        pass
    except IOError as e:
        xlog.warn("Check %s IOError:%s", self.ip, e)
        pass
    except httplib.BadStatusLine:
        #logging.debug('Check_appengine http.bad status line ip:%s', ip)
        #import traceback
        #traceback.print_exc()
        pass
    except Exception as e:
        if len(e.args)>0:
            errno_str = e.args[0]
        else:
            errno_str = e.message
        xlog.exception('check_appengine %s %s err:%s', self.ip, errno_str, e)
    finally:
        if ssl_sock and close_ssl:
            ssl_sock.close()
    return False
def handle(self):
    """Serve requests on this connection until it must close.

    Each iteration defaults to closing; handle_one_request() may clear
    close_connection to keep the connection alive.  Any exception is
    logged and treated as fatal for the connection.  The socket is
    always closed after the loop.
    """
    #xlog.info('Connected from %r', self.client_address)
    keep_going = True
    while keep_going:
        self.close_connection = 1
        try:
            self.handle_one_request()
        except Exception as e:
            xlog.warn("handle err:%r close", e)
            self.close_connection = 1
        keep_going = not self.close_connection
    self.connection.close()
def head_request(self, ssl_sock):
    # Keep-alive probe: send "HEAD /_gh/" to the socket's appid host
    # and return True only when the app answers 200.  Public xxnet-
    # appids are never kept alive (quota protection).
    if ssl_sock.host == '':
        # fresh socket: bind it to an appid first
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): "xxnet-" is 6 characters yet the digit test starts
    # at index 7, skipping one character — confirm the appid format.
    if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        # bound both the wrapped and the raw socket
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            xlog.warn("head send len:%d %d", ret, len(data))

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()

        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            raise Exception("app check fail")
        return True
    except httplib.BadStatusLine as e:
        # typically the server already dropped an idle connection
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.debug("head request fail:%r", e)
        return False
    finally:
        if response:
            response.close()
def add_ip(self, ip_str, handshake_time, domain=None, server='', fail_times=0):
    """Register an ip in the pool, or refresh its stats when already known.

    Returns True for a brand-new entry, False when the ip already existed
    or on internal error; returns None (bare return) for rejected input.
    """
    if not isinstance(ip_str, basestring):
        xlog.error("add_ip input")
        return

    # In IPv6 mode only colon-form addresses are acceptable.
    if config.USE_IPV6 and ":" not in ip_str:
        xlog.warn("add %s but ipv6", ip_str)
        return

    handshake_time = int(handshake_time)

    self.ip_lock.acquire()
    try:
        existing = self.ip_dict.get(ip_str)
        if existing is not None:
            # Known ip: refresh its health counters only.
            existing['handshake_time'] = handshake_time
            existing['fail_times'] = fail_times
            existing['fail_time'] = 0
            self.append_ip_history(ip_str, handshake_time)
            return False

        self.iplist_need_save = 1
        self.good_ip_num += 1

        self.ip_dict[ip_str] = {
            'handshake_time': handshake_time,
            "fail_times": fail_times,
            "transfered_data": 0,
            'data_active': 0,
            'domain': domain,
            'server': server,
            "history": [[time.time(), handshake_time]],
            "fail_time": 0,
            "success_time": 0,
            "get_time": 0,
            "links": 0,
        }

        if 'gws' in server:
            self.gws_ip_list.append(ip_str)
        return True
    except Exception as e:
        xlog.exception("add_ip err:%s", e)
    finally:
        self.ip_lock.release()

    return False
def report_not_exist(self, appid):
    """Drop an appid that Google reports as non-existent.

    Moves it out of the working list (and the global config list) under
    the lock, then rebuilds the appid pool when nothing is left.
    """
    xlog.warn("APPID_manager, report_not_exist %s", appid)
    self.lock.acquire()
    try:
        # Best-effort removal: the appid may already be gone from any
        # of these lists, which is fine.
        config.GAE_APPIDS.remove(appid)
        self.not_exist_appids.append(appid)
        self.working_appid_list.remove(appid)
    except:
        pass
    finally:
        self.lock.release()

    if not self.working_appid_list:
        self.reset_appid()
def save(self):
    """Write the user-specific gae_proxy settings to data/gae_proxy/config.ini.

    Only values that differ from DEFAULT_CONFIG (or are non-empty) are
    written, so the file stays a minimal overlay on the defaults.
    """
    CONFIG_USER_FILENAME = os.path.abspath(
        os.path.join(root_path, "data", "gae_proxy", "config.ini"))
    try:
        # Bug fix: use a context manager so the file is closed even when a
        # write fails (the original leaked the handle on error).
        with open(CONFIG_USER_FILENAME, "w") as f:
            if self.user_special.appid != "":
                f.write("[gae]\n")
                f.write("appid = %s\n" % self.user_special.appid)
                f.write("password = %s\n\n" % self.user_special.password)

            f.write("[proxy]\n")
            f.write("enable = %s\n" % self.user_special.proxy_enable)
            f.write("type = %s\n" % self.user_special.proxy_type)
            f.write("host = %s\n" % self.user_special.proxy_host)
            f.write("port = %s\n" % self.user_special.proxy_port)
            f.write("user = %s\n" % self.user_special.proxy_user)
            f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)

            if self.user_special.host_appengine_mode != "gae":
                f.write("[hosts]\n")
                f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
                f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)

            f.write("[google_ip]\n")
            # Bug fix: cast to int like every sibling check below; the value
            # may arrive as a string and would then never compare equal.
            if int(self.user_special.ip_connect_interval) != self.DEFAULT_CONFIG.getint(
                    "google_ip", "ip_connect_interval"):
                f.write("ip_connect_interval = %d\n" % int(self.user_special.ip_connect_interval))

            if int(self.user_special.auto_adjust_scan_ip_thread_num) != self.DEFAULT_CONFIG.getint(
                    "google_ip", "auto_adjust_scan_ip_thread_num"):
                f.write("auto_adjust_scan_ip_thread_num = %d\n\n"
                        % int(self.user_special.auto_adjust_scan_ip_thread_num))

            if int(self.user_special.scan_ip_thread_num) != self.DEFAULT_CONFIG.getint(
                    "google_ip", "max_scan_ip_thread_num"):
                f.write("max_scan_ip_thread_num = %d\n\n" % int(self.user_special.scan_ip_thread_num))

            if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint("google_ip", "use_ipv6"):
                f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))

            f.write("[connect_manager]\n")
            if int(self.user_special.connect_interval) != self.DEFAULT_CONFIG.getint(
                    "connect_manager", "connect_interval"):
                f.write("connect_interval = %d\n\n" % int(self.user_special.connect_interval))
    except Exception:
        # Bug fix: the warn message string was broken across two physical
        # lines (a syntax error); restored to one literal, matching the
        # wording used by the sibling save() in this project.
        xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
def head_request(self, ssl_sock):
    """Send a minimal HEAD request over an idle GAE link to test it.

    Returns True if the appid answered 200 (link reusable), False if the
    link should be dropped.
    """
    if ssl_sock.host == '':
        # First use of this socket: bind it to an available appid.
        ssl_sock.appid = appid_manager.get_appid()
        if not ssl_sock.appid:
            xlog.error("no appid can use")
            return False
        host = ssl_sock.appid + ".appspot.com"
        ssl_sock.host = host
    else:
        host = ssl_sock.host

    # public appid don't keep alive, for quota limit.
    # NOTE(review): prefix "xxnet-" has 6 chars, yet the digit check
    # starts at index 7 — verify [6:] was not intended.
    if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
        #logging.info("public appid don't keep alive")
        #self.keep_alive = 0
        return False

    #logging.debug("head request %s", host)
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

    response = None
    try:
        # Cap waits on both the SSL object and the raw socket beneath it.
        ssl_sock.settimeout(10)
        ssl_sock.sock.settimeout(10)

        data = request_data.encode()
        ret = ssl_sock.send(data)
        if ret != len(data):
            xlog.warn("head send len:%d %d", ret, len(data))

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app head fail status:%d", status)
            # Jump to the generic failure handler below.
            raise Exception("app check fail")
        return True
    except httplib.BadStatusLine as e:
        # Peer closed the idle connection; note how long it was unused.
        inactive_time = time.time() - ssl_sock.last_use_time
        xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
        return False
    except Exception as e:
        xlog.debug("head request fail:%r", e)
        return False
    finally:
        if response:
            response.close()
def handle_one_request(self):
    """Read, parse and dispatch a single HTTP request on this connection.

    Sets self.close_connection = 0 only after the handler and the final
    flush succeed, allowing the caller's keep-alive loop to continue.
    """
    try:
        try:
            self.raw_requestline = self.rfile.readline(65537)
        except Exception as e:
            # Peer vanished before sending a request line.
            #xlog.warn("simple server handle except %r", e)
            return
        if len(self.raw_requestline) > 65536:
            xlog.warn("recv command line too large")
            return
        if not self.raw_requestline:
            #xlog.warn("closed")
            return

        self.parse_request()

        # Dispatch to do_<METHOD>; only the matched attribute is touched.
        if self.command in ("GET", "POST", "CONNECT", "HEAD",
                            "DELETE", "OPTIONS", "PUT"):
            getattr(self, "do_" + self.command)()
        else:
            xlog.warn("unhandler cmd:%s", self.command)
            return

        self.wfile.flush()  #actually send the response if not already done.
        self.close_connection = 0
    except socket.error as e:
        xlog.warn("socket error:%r", e)
    except IOError as e:
        if e.errno == errno.EPIPE:
            xlog.warn("PIPE error:%r", e)
        else:
            xlog.warn("IOError:%r", e)
    #except OpenSSL.SSL.SysCallError as e:
    #    xlog.warn("socket error:%r", e)
    except Exception as e:
        xlog.exception("handler:%r", e)
def do_GET(self):
    """Serve the PAC file, rewriting its listen addresses for this client.

    Anything that is not a clean request for proxy.pac gets a short
    informational or 404 response.
    """
    xlog.info('PAC from:%s %s %s ', self.address_string(), self.command, self.path)

    path = urlparse.urlparse(self.path).path  # '/proxy.pac'
    filename = os.path.normpath('./' + path)  # proxy.pac

    # Absolute-URI request: browser mistook the PAC port for a proxy port.
    if self.path.startswith(('http://', 'https://')):
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        if filename.endswith(('.jpg', '.gif', '.jpeg', '.bmp')):
            # Answer image requests with a cached one-pixel placeholder.
            data += b'Content-Type: image/gif\r\n\r\n' + self.onepixel
        else:
            data += b'\r\n This is the Pac server, not proxy port, use 8087 as proxy port.'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
        return

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s haking', self.address_string(), self.command, self.path)
        return

    # Only the PAC file itself is served from this handler.
    if filename != 'proxy.pac':
        xlog.warn("pac_server GET %s fail", filename)
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        return

    mimetype = 'text/plain'

    # Regenerate the PAC in the background on explicit flush or when the
    # served copy is older than the configured expiry.
    if self.path.endswith('.pac?flush') or time.time() - os.path.getmtime(
            get_serving_pacfile()) > config.PAC_EXPIRED:
        thread.start_new_thread(PacUtil.update_pacfile, (user_pacfile, ))

    pac_filename = get_serving_pacfile()
    with open(pac_filename, 'rb') as fp:
        data = fp.read()

    # Rewrite the template's listen addresses to the host the client used,
    # so the PAC works when the proxy is reached over the LAN.
    host = self.headers.getheader('Host')
    host, _, port = host.rpartition(":")
    gae_proxy_proxy = host + ":" + str(config.LISTEN_PORT)
    pac_proxy = host + ":" + str(config.PAC_PORT)

    data = data.replace(gae_proxy_listen, gae_proxy_proxy)
    data = data.replace(pac_listen, pac_proxy)

    self.wfile.write(
        ('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n'
         % (mimetype, len(data))).encode())
    self.wfile.write(data)
def handle_one_request(self):
    """Process one HTTP request: read the request line, parse it, and
    route it to the matching do_<METHOD> handler.

    Leaves self.close_connection untouched on any early return, so the
    caller will close the connection.
    """
    try:
        try:
            line = self.rfile.readline(65537)
        except Exception as e:
            #xlog.warn("simple server handle except %r", e)
            return
        self.raw_requestline = line

        if len(line) > 65536:
            xlog.warn("recv command line too large")
            return
        if not line:
            # Empty read: the client closed the connection.
            return

        self.parse_request()

        cmd = self.command
        supported = ("GET", "POST", "CONNECT", "HEAD", "DELETE", "OPTIONS", "PUT")
        if cmd not in supported:
            xlog.warn("unhandler cmd:%s", cmd)
            return
        getattr(self, "do_%s" % cmd)()

        # Push out anything still buffered, then permit keep-alive.
        self.wfile.flush()
        self.close_connection = 0
    except socket.error as e:
        xlog.warn("socket error:%r", e)
    except IOError as e:
        if e.errno == errno.EPIPE:
            xlog.warn("PIPE error:%r", e)
        else:
            xlog.warn("IOError:%r", e)
    #except OpenSSL.SSL.SysCallError as e:
    #    xlog.warn("socket error:%r", e)
    except Exception as e:
        xlog.exception("handler:%r", e)
def serve_forever(self):
    """Accept loop: poll the listen socket and hand every new connection
    to process_connect() until self.running goes False."""
    fdset = [self.socket, ]
    while self.running:
        # 1s select timeout so a change of self.running is noticed promptly.
        r, w, e = select.select(fdset, [], [], 1)
        if self.socket in r:
            try:
                (sock, address) = self.socket.accept()
            except IOError as e:
                xlog.warn("socket(%s:%s) accept fail(errno: %s).",
                          self.server_address[0], self.server_address[1],
                          e.args[0])
                if e.args[0] == 10022:  # WSAEINVAL: listen socket invalidated
                    xlog.info("server %s:%d restarted.",
                              self.server_address[0], self.server_address[1])
                    self.init_socket()
                    fdset = [self.socket, ]
                    # Bug fix: the original fell through to `return` even
                    # after re-creating the socket, which killed the accept
                    # loop the re-init was meant to save. Keep serving on
                    # the fresh socket instead.
                    continue
                # Any other accept error still terminates the loop.
                return
            self.process_connect(sock, address)
def send_file(self, filename, mimetype):
    """Stream a local file to the client with long-lived cache headers
    (Expires ~5.5 hours ahead, max-age one year)."""
    size = os.path.getsize(filename)
    try:
        expire_at = datetime.datetime.today() + datetime.timedelta(minutes=330)
        header = ('HTTP/1.1 200\r\nAccess-Control-Allow-Origin: *\r\nCache-Control:public, max-age=31536000\r\nExpires: %s\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n'
                  % (expire_at.strftime('%a, %d %b %Y %H:%M:%S GMT'), mimetype, size))
        self.wfile.write(header.encode())

        # Stream in 64KB-ish chunks to bound memory use on large files.
        with open(filename, 'rb') as fp:
            for chunk in iter(lambda: fp.read(65535), b''):
                self.wfile.write(chunk)
    except:
        #self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Open file fail')
        xlog.warn("download broken")
def _request(self, method, host, path="/", headers=None, data="", timeout=40):
    """Send one request through the front dispatcher.

    Returns (content, status, response); on dispatch failure or exception
    returns ("", 500, {}).
    """
    # Bug fix: the original used `headers={}` — a shared mutable default
    # argument. None + fill-in keeps the call signature compatible.
    if headers is None:
        headers = {}
    try:
        # dict(headers) hands the dispatcher its own copy.
        response = self.http_dispatcher.request(
            method, host, path, dict(headers), data, timeout=timeout)
        if not response:
            return "", 500, {}

        status = response.status
        if status != 200:
            xlog.warn("front request %s %s%s fail, status:%d",
                      method, host, path, status)

        content = response.task.read_all()
        # xlog.debug("%s %s%s trace:%s", method, response.ssl_sock.host, path, response.task.get_trace())
        return content, status, response
    except Exception as e:
        xlog.exception("front request %s %s%s fail:%r",
                       method, host, path, e)
        return "", 500, {}
def do_METHOD(self):
    """Common entry for plain-HTTP methods: route the request to the web
    control UI, a localhost forward, a 301-to-https redirect, or the GAE
    agent, based on the target host."""
    touch_active()

    host = self.headers.get('Host', '')
    host_ip, _, port = host.rpartition(':')

    # Request addressed to the proxy itself: serve the control interface.
    if host_ip == "127.0.0.1" and port == str(config.LISTEN_PORT):
        controler = web_control.ControlHandler(self.client_address,
                                               self.headers, self.command,
                                               self.path, self.rfile,
                                               self.wfile)
        if self.command == "GET":
            return controler.do_GET()
        elif self.command == "POST":
            return controler.do_POST()
        else:
            xlog.warn("method not defined: %s", self.command)
            return

    # Normalize self.path to an absolute URI; recover host from the URI
    # when the Host header is missing.
    if self.path[0] == '/' and host:
        self.path = 'http://%s%s' % (host, self.path)
    elif not host and '://' in self.path:
        host = urlparse.urlparse(self.path).netloc

    # Browser looped back to us: forward to the local service directly.
    if host.startswith("127.0.0.1") or host.startswith("localhost"):
        xlog.warn("Your browser forward localhost to proxy.")
        return self.forward_local()

    self.parsed_url = urlparse.urlparse(self.path)

    if host in config.HOSTS_GAE:
        return self.do_AGENT()

    # Hosts configured for forward/direct get bounced to https.
    if host in config.HOSTS_FWD or host in config.HOSTS_DIRECT:
        return self.wfile.write(
            ('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' %
             self.path.replace('http://', 'https://', 1)).encode())

    # Same routing rules, matched by domain suffix instead of exact host.
    if host.endswith(config.HOSTS_GAE_ENDSWITH):
        return self.do_AGENT()

    if host.endswith(config.HOSTS_FWD_ENDSWITH) or host.endswith(
            config.HOSTS_DIRECT_ENDSWITH):
        return self.wfile.write(
            ('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' %
             self.path.replace('http://', 'https://', 1)).encode())

    # Default: relay through GAE.
    return self.do_AGENT()
def network_is_ok():
    """Probe general internet reachability with a HEAD to code.jquery.com.

    Caches the verdict in the module globals for check_network_interval
    seconds and refuses to start overlapping probes.  Returns the cached
    or freshly-determined boolean.
    """
    global checking_lock, checking_num, network_ok, last_check_time, check_network_interval
    # Serve the cached answer while it is still fresh.
    if time.time() - last_check_time < check_network_interval:
        return network_ok

    # Another thread is already probing: reuse the last known answer.
    if checking_num > 0:
        return network_ok

    if config.PROXY_ENABLE:
        # Route the probe through the configured upstream proxy by
        # temporarily monkey-patching the global socket factory.
        socket.socket = socks.socksocket
        xlog.debug("patch socks")

    checking_lock.acquire()
    checking_num += 1
    checking_lock.release()
    try:
        conn = httplib.HTTPSConnection("code.jquery.com", 443, timeout=30)
        header = {
            "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Safari/537.36",
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-encoding": "gzip, deflate, sdch",
            "accept-language": 'en-US,en;q=0.8,ja;q=0.6,zh-CN;q=0.4,zh;q=0.2',
            "connection": "keep-alive"
        }
        conn.request("HEAD", "/", headers=header)
        response = conn.getresponse()
        # Any HTTP status at all proves the network path works.
        if response.status:
            xlog.debug("network is ok")
            network_ok = True
            last_check_time = time.time()
            return True
    except:
        pass
    finally:
        checking_lock.acquire()
        checking_num -= 1
        checking_lock.release()

        if config.PROXY_ENABLE:
            # Undo the socket monkey-patch.
            socket.socket = default_socket
            xlog.debug("restore socket")

    xlog.warn("network fail.")
    network_ok = False
    last_check_time = time.time()
    return False
def create_more_connection(self):
    """Top up the fresh-connection pool by spawning worker threads,
    bounded by max_thread_num and the global connect throttle."""
    shortage = self.connection_pool_min_num - self.new_conn_pool.qsize()
    workers_to_start = min(self.max_thread_num, shortage)
    for _ in range(workers_to_start):
        # Global connect throttle: back off instead of hammering.
        if not connect_control.allow_connect():
            xlog.warn("create more connect, control not allow")
            time.sleep(10)
            continue

        if self.thread_num > self.max_thread_num:
            break

        with self.thread_num_lock:
            self.thread_num += 1

        worker = threading.Thread(target=self.create_connection_worker)
        worker.start()
def do_CONNECT_FWD(self):
    """socket forward for http CONNECT command"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    xlog.info('FWD %s %s:%d ', self.command, host, port)

    # How 'fresh' the pooled TCP connection must be for this destination.
    if host == "appengine.google.com" or host == "www.google.com":
        connected_in_s = 5  # gae_proxy upload to appengine is slow, it need more 'fresh' connection.
    else:
        connected_in_s = 10  # gws connect can be used after tcp connection created 15 s

    try:
        # Acknowledge the tunnel, then grab any bytes the client already
        # sent (e.g. the TLS ClientHello) so they can be replayed.
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
        data = self.connection.recv(1024)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        return

    remote = forwork_manager.create_connection(host=host, port=port,
                                               sock_life=connected_in_s)
    if remote is None:
        self.connection.close()
        xlog.warn('FWD %s %s:%d create_connection fail',
                  self.command, host, port)
        return

    try:
        # Replay the client's initial bytes to the remote end.
        if data:
            remote.send(data)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        remote.close()
        return

    # reset timeout default to avoid long http upload failure, but it will delay timeout retry :(
    remote.settimeout(None)

    # Pump bytes both ways until either side closes.
    forwork_manager.forward_socket(self.connection, remote,
                                   bufsize=self.bufsize)
    xlog.debug('FWD %s %s:%d with closed', self.command, host, port)
def create_more_connection(self):
    """Spawn connection-worker threads until the new-connection pool is
    back at its minimum size (never exceeding max_thread_num)."""
    deficit = self.connection_pool_min_num - self.new_conn_pool.qsize()
    spawn_count = min(self.max_thread_num, deficit)

    started = 0
    while started < spawn_count:
        started += 1

        # Respect the process-wide connect rate limiter.
        if not connect_control.allow_connect():
            xlog.warn("create more connect, control not allow")
            time.sleep(10)
            continue

        if self.thread_num > self.max_thread_num:
            break

        self.thread_num_lock.acquire()
        try:
            self.thread_num += 1
        finally:
            self.thread_num_lock.release()

        threading.Thread(target=self.create_connection_worker).start()
def send_file(self, filename, mimetype):
    """Send `filename` to the client as `mimetype`, with aggressive
    cache headers (Expires ~5.5h from now, max-age one year)."""
    length = os.path.getsize(filename)
    try:
        stamp = (datetime.datetime.today() +
                 datetime.timedelta(minutes=330)).strftime('%a, %d %b %Y %H:%M:%S GMT')
        head = ('HTTP/1.1 200\r\nAccess-Control-Allow-Origin: *\r\nCache-Control:public, max-age=31536000\r\nExpires: %s\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n'
                % (stamp, mimetype, length))
        self.wfile.write(head.encode())

        # Copy the file body out in bounded chunks.
        with open(filename, 'rb') as src:
            while True:
                block = src.read(65535)
                if not block:
                    break
                self.wfile.write(block)
    except:
        #self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Open file fail')
        xlog.warn("download broken")
def checker(self):
    """Worker loop: pull candidate ips, probe them, record the good ones.

    Exits when get_ip() raises (candidate queue exhausted).
    """
    while True:
        try:
            candidate = self.get_ip()
        except Exception as e:
            # No more candidates to test: this worker is done.
            xlog.info("no ip left")
            return

        try:
            result = self.check_ip.check_ip(candidate)
        except Exception as e:
            xlog.warn("check except:%r", e)
            continue

        if result and result.ok:
            self.write_ip(candidate, result.domain, result.handshake_time)
        else:
            xlog.debug("ip:%s fail", candidate)
def update_ip(self, ip_str, handshake_time):
    """Record a fresh successful handshake for a known ip and persist
    the list."""
    if not isinstance(ip_str, basestring):
        xlog.error("set_ip input")
        return

    handshake_time = int(handshake_time)
    if handshake_time < 5:
        # Sub-5ms handshake cannot be real; drop the sample.
        xlog.warn("%s handshake:%d impossible", ip_str, 1000 * handshake_time)
        return

    self.ip_lock.acquire()
    try:
        if ip_str in self.ip_dict:
            record = self.ip_dict[ip_str]

            # Case: some good ip, average handshake time is 300ms
            # some times ip package lost cause handshake time become 2000ms
            # this ip will not return back to good ip front until all become bad
            # There for, prevent handshake time increase too quickly.
            record['handshake_time'] = min(handshake_time,
                                           record['handshake_time'] + 500)

            record['success_time'] = time.time()
            if record['fail_times'] > 0:
                # The ip just recovered: count it good again.
                self.good_ip_num += 1
            record['fail_times'] = 0
            self.append_ip_history(ip_str, handshake_time)
            record["fail_time"] = 0
            self.iplist_need_save = 1
        #logging.debug("update ip:%s not exist", ip_str)
    except Exception as e:
        xlog.error("update_ip err:%s", e)
    finally:
        self.ip_lock.release()

    self.save_ip_list()