def load_ip(self):
    if os.path.isfile(self.good_ip_file):
        file_path = self.good_ip_file
    else:
        file_path = self.default_good_ip_file

    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            if line.startswith("#"):
                continue

            str_l = line.split(' ')
            if len(str_l) < 4:
                xlog.warning("line err: %s", line)
                continue

            ip = str_l[0]
            domain = str_l[1]
            server = str_l[2]
            handshake_time = int(str_l[3])
            if len(str_l) > 4:
                fail_times = int(str_l[4])
            else:
                fail_times = 0

            #logging.info("load ip: %s time:%d domain:%s server:%s", ip, handshake_time, domain, server)
            self.add_ip(ip, handshake_time, domain, server, fail_times)
        except Exception as e:
            xlog.exception("load_ip line:%s err:%s", line, e)

    xlog.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_gws_ip(force=True)

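# The good_ip file parsed by load_ip() is plain text, one record per line,
# space separated: "<ip> <domain> <server> <handshake_time> [fail_times]",
# with '#' starting a comment line. A compatible example (the values below
# are made up for illustration, not from the source):
#
#   # ip domain server handshake_time fail_times
#   203.0.113.10 gws gws 120 0
#   203.0.113.11 gws gws 150 2
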
def remove_slowest_ip(self):
    if len(self.gws_ip_list) <= self.max_good_ip_num:
        return

    self.try_sort_gws_ip(force=True)

    self.ip_lock.acquire()
    try:
        ip_num = len(self.gws_ip_list)
        while ip_num > self.max_good_ip_num:
            ip_str = self.gws_ip_list[ip_num - 1]

            property = self.ip_dict[ip_str]
            server = property['server']
            fails = property['fail_times']
            handshake_time = property['handshake_time']
            xlog.info("remove_slowest_ip:%s handshake_time:%d, fails:%d", ip_str, handshake_time, fails)

            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            ip_num -= 1
    except Exception as e:
        xlog.exception("remove_slowest_ip err:%s", e)
    finally:
        self.ip_lock.release()

def scan_ip_worker(self):
    while self.scan_thread_count <= self.scan_ip_thread_num and connect_control.keep_running:
        if not connect_control.allow_scan():
            time.sleep(10)
            continue

        try:
            time.sleep(1)
            ip = self.ip_range.get_ip()

            if ip in self.ip_dict:
                continue

            connect_control.start_connect_register()
            result = check_ip.test_gae_ip(ip)
            connect_control.end_connect_register()
            if not result:
                continue

            if self.add_ip(ip, result.handshake_time, result.domain, "gws"):
                #logging.info("add %s CN:%s type:%s time:%d gws:%d ", ip,
                #    result.domain, result.server_type, result.handshake_time, len(self.gws_ip_list))
                xlog.info("scan_ip add ip:%s time:%d", ip, result.handshake_time)
                scan_ip_log.info("Add %s time:%d CN:%s ", ip, result.handshake_time, result.domain)
                self.remove_slowest_ip()
                self.save_ip_list()
        except Exception as e:
            xlog.exception("google_ip.runJob fail:%r", e)

    self.scan_thread_lock.acquire()
    self.scan_thread_count -= 1
    self.scan_thread_lock.release()
    xlog.info("scan_ip_worker exit")

def scan_ip_worker(self):
    while self.searching_thread_count <= self.scan_ip_thread_num and connect_control.keep_running:
        if not connect_control.allow_scan():
            time.sleep(10)
            continue

        try:
            time.sleep(1)
            ip_int = ip_range.get_ip()
            ip_str = ip_utils.ip_num_to_string(ip_int)

            if ip_str in self.ip_dict:
                continue

            connect_control.start_connect_register()
            result = check_ip.test_gae_ip(ip_str)
            connect_control.end_connect_register()
            if not result:
                continue

            if self.add_ip(ip_str, result.handshake_time, result.domain, "gws"):
                #logging.info("add %s CN:%s type:%s time:%d gws:%d ", ip_str,
                #    result.domain, result.server_type, result.handshake_time, len(self.gws_ip_list))
                xlog.info("scan_ip add ip:%s time:%d", ip_str, result.handshake_time)
                scan_ip_log.info("Add %s time:%d CN:%s ", ip_str, result.handshake_time, result.domain)
                self.remove_slowest_ip()
                self.save_ip_list()
        except Exception as e:
            xlog.exception("google_ip.runJob fail:%r", e)

    self.ncount_lock.acquire()
    self.searching_thread_count -= 1
    self.ncount_lock.release()
    xlog.info("scan_ip_worker exit")

def do_POST(self):
    try:
        refer = self.headers.getheader('Referer')
        netloc = urlparse.urlparse(refer).netloc
        if not netloc.startswith("127.0.0.1") and not netloc.startswith("localhost"):
            xlog.warn("web control ref:%s refuse", netloc)
            return
    except:
        pass

    xlog.debug('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)
    try:
        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
        if ctype == 'multipart/form-data':
            self.postvars = cgi.parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers.getheader('content-length'))
            self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
        else:
            self.postvars = {}
    except:
        self.postvars = {}

    path = urlparse.urlparse(self.path).path
    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path.startswith("/importip"):
        return self.req_importip_handler()
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)

def remove_ip_process(self):
    try:
        while connect_control.keep_running:
            try:
                ip_str = self.to_remove_ip_list.get_nowait()
            except:
                break

            result = check_ip.test(ip_str)
            if result and result.appspot_ok:
                self.add_ip(ip_str, result.handshake_time, result.domain, result.server_type)
                xlog.debug("remove ip process, restore ip:%s", ip_str)
                continue

            if not check_ip.network_is_ok():
                self.to_remove_ip_list.put(ip_str)
                xlog.warn("network is unreachable. check your network connection.")
                return

            xlog.info("real remove ip:%s ", ip_str)
            self.iplist_need_save = 1
    finally:
        self.remove_ip_thread_num_lock.acquire()
        self.remove_ip_thread_num -= 1
        self.remove_ip_thread_num_lock.release()

def adjust_scan_thread_num(self, max_scan_ip_thread_num=None):
    if max_scan_ip_thread_num:
        self.max_scan_ip_thread_num = max_scan_ip_thread_num

    if not self.auto_adjust_scan_ip_thread_num:
        scan_ip_thread_num = self.max_scan_ip_thread_num
    elif len(self.gws_ip_list) < 100:
        scan_ip_thread_num = self.max_scan_ip_thread_num
    else:
        try:
            the_100th_ip = self.gws_ip_list[99]
            the_100th_handshake_time = self.ip_dict[the_100th_ip]['handshake_time']
            scan_ip_thread_num = int((the_100th_handshake_time - 200) / 2 * self.max_scan_ip_thread_num / 50)
        except Exception as e:
            xlog.warn("adjust_scan_thread_num fail:%r", e)
            return

    if scan_ip_thread_num > self.max_scan_ip_thread_num:
        scan_ip_thread_num = self.max_scan_ip_thread_num

    if scan_ip_thread_num != self.scan_ip_thread_num:
        xlog.info("Adjust scan thread num from %d to %d", self.scan_ip_thread_num, scan_ip_thread_num)
        self.scan_ip_thread_num = scan_ip_thread_num
        self.search_more_google_ip()

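# A worked example of the auto-adjust formula in adjust_scan_thread_num()
# (numbers are illustrative, not from the source). With
# max_scan_ip_thread_num = 50 and the 100th-fastest handshake at 400 ms:
#
#   scan_ip_thread_num = int((400 - 200) / 2 * 50 / 50) = 100  -> capped to 50
#
# At 300 ms the result is int((300 - 200) / 2 * 50 / 50) = 50, and at 260 ms
# it is 30: scanning only slows down once the pool's 100th-best handshake
# time drops below roughly 300 ms.
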
def import_mac_ca(common_name, certfile):
    commonname = "GoAgent XX-Net"  #TODO: need check again
    ca_hash = CertUtil.ca_thumbprint.replace(':', '')

    def get_exist_ca_sha1():
        args = ['security', 'find-certificate', '-Z', '-a', '-c', commonname]
        output = subprocess.check_output(args)
        for line in output.splitlines(True):
            if len(line) == 53 and line.startswith("SHA-1 hash:"):
                sha1_hash = line[12:52]
                return sha1_hash

    exist_ca_sha1 = get_exist_ca_sha1()
    if exist_ca_sha1 == ca_hash:
        xlog.info("GoAgent CA exist")
        return

    import_command = 'security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain ../../data/gae_proxy/CA.crt'  # % certfile.decode('utf-8')
    if exist_ca_sha1:
        delete_ca_command = 'security delete-certificate -Z %s' % exist_ca_sha1
        exec_command = "%s;%s" % (delete_ca_command, import_command)
    else:
        exec_command = import_command

    admin_command = """osascript -e 'do shell script "%s" with administrator privileges' """ % exec_command
    cmd = admin_command.encode('utf-8')
    xlog.info("try auto import CA command:%s", cmd)
    os.system(cmd)

def context_builder(ca_certs=None, cipher_suites=('ALL:!RC4-SHA:!ECDHE-RSA-RC4-SHA:!ECDHE-RSA-AES128-GCM-SHA256:!AES128-GCM-SHA256',)):
    # 'ALL', '!aNULL', '!eNULL'
    global ssl_version
    if not ssl_version:
        if hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
            ssl_version = "TLSv1_2"
        elif hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
            ssl_version = "TLSv1_1"
        elif hasattr(OpenSSL.SSL, "TLSv1_METHOD"):
            ssl_version = "TLSv1"
        else:
            ssl_version = "SSLv23"

        if sys.platform == "darwin":
            ssl_version = "TLSv1"

        # freenas openssl support fix from twitter user "himanzero"
        # https://twitter.com/himanzero/status/645231724318748672
        if sys.platform == "freebsd9":
            ssl_version = "TLSv1"

        xlog.info("SSL use version:%s", ssl_version)

    protocol_version = getattr(OpenSSL.SSL, '%s_METHOD' % ssl_version)
    ssl_context = OpenSSL.SSL.Context(protocol_version)
    if ca_certs:
        ssl_context.load_verify_locations(os.path.abspath(ca_certs))
        ssl_context.set_verify(OpenSSL.SSL.VERIFY_PEER, lambda c, x, e, d, ok: ok)
    else:
        ssl_context.set_verify(OpenSSL.SSL.VERIFY_NONE, lambda c, x, e, d, ok: ok)
    ssl_context.set_cipher_list(':'.join(cipher_suites))
    return ssl_context

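# A minimal usage sketch for context_builder(). The `_demo_` helper and the
# target address are illustrative, not part of the source; it assumes the
# pyOpenSSL `OpenSSL` import used above.
def _demo_context_builder_usage():
    import socket
    context = context_builder()  # VERIFY_NONE unless ca_certs is given
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_sock = OpenSSL.SSL.Connection(context, sock)
    ssl_sock.connect(("203.0.113.10", 443))  # example IP (TEST-NET-3)
    ssl_sock.do_handshake()
    return ssl_sock
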
def create_ca():
    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
    req = OpenSSL.crypto.X509Req()
    subj = req.get_subject()
    subj.countryName = 'CN'
    subj.stateOrProvinceName = 'Internet'
    subj.localityName = 'Cernet'
    subj.organizationName = CertUtil.ca_vendor
    subj.organizationalUnitName = '%s Root' % CertUtil.ca_vendor
    subj.commonName = '%s XX-Net' % CertUtil.ca_vendor  #TODO: here should be GoAgent
    req.set_pubkey(key)
    req.sign(key, CertUtil.ca_digest)

    ca = OpenSSL.crypto.X509()
    ca.set_version(2)
    ca.set_serial_number(0)
    ca.gmtime_adj_notBefore(0)
    ca.gmtime_adj_notAfter(24 * 60 * 60 * 3652)
    ca.set_issuer(req.get_subject())
    ca.set_subject(req.get_subject())
    ca.set_pubkey(req.get_pubkey())
    ca.add_extensions([
        OpenSSL.crypto.X509Extension('basicConstraints', False, 'CA:TRUE', ca, ca)
    ])
    ca.sign(key, CertUtil.ca_digest)
    #logging.debug("CA key:%s", key)
    xlog.info("create ca")
    return key, ca

def report_connect_fail(self, ip_str, force_remove=False):
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if ip_str not in self.ip_dict:
            return

        self.ip_dict[ip_str]["links"] -= 1

        # ignore if the system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                # connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            xlog.debug("fail time too near")
            return

        # increase handshake_time so this ip is picked with lower probability
        self.ip_dict[ip_str]["handshake_time"] += 300

        if self.ip_dict[ip_str]["fail_times"] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]["fail_times"] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]["fail_times"] >= 50:
            property = self.ip_dict[ip_str]
            server = property["server"]
            del self.ip_dict[ip_str]

            if "gws" in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info("remove ip tmp:%s left amount:%d gws_num:%d",
                          ip_str, len(self.ip_dict), len(self.gws_ip_list))
            else:
                xlog.info("remove ip:%s left amount:%d gws_num:%d",
                          ip_str, len(self.ip_dict), len(self.gws_ip_list))

            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()

def import_debian_ca(common_name, ca_file):

    def get_debian_ca_sha1(nss_path):
        commonname = "GoAgent XX-Net - GoAgent"  #TODO: here should be GoAgent - XX-Net
        cmd = ['certutil', '-L', '-d', 'sql:%s' % nss_path, '-n', commonname]
        lines = get_cmd_out(cmd)

        get_sha1_title = False
        sha1 = ""
        for line in lines:
            if line.endswith("Fingerprint (SHA1):\n"):
                get_sha1_title = True
                continue
            if get_sha1_title:
                sha1 = line
                break

        sha1 = sha1.replace(' ', '').replace(':', '').replace('\n', '')
        if len(sha1) != 40:
            return False
        else:
            return sha1

    home_path = os.path.expanduser("~")
    nss_path = os.path.join(home_path, ".pki/nssdb")
    if not os.path.isdir(nss_path):
        return False

    if not any(os.path.isfile('%s/certutil' % x) for x in os.environ['PATH'].split(os.pathsep)):
        xlog.warning('please install *libnss3-tools* package to import GoAgent root ca')
        return False

    sha1 = get_debian_ca_sha1(nss_path)
    ca_hash = CertUtil.ca_thumbprint.replace(':', '')
    if sha1 == ca_hash:
        xlog.info("system cert exist")
        return

    # shell command to list all certs:
    #   certutil -L -d sql:$HOME/.pki/nssdb

    # remove old cert first
    cmd_line = 'certutil -L -d sql:$HOME/.pki/nssdb |grep "GoAgent" && certutil -d sql:$HOME/.pki/nssdb -D -n "%s" ' % (common_name)
    os.system(cmd_line)

    # install new cert
    cmd_line = 'certutil -d sql:$HOME/.pki/nssdb -A -t "C,," -n "%s" -i "%s"' % (common_name, ca_file)
    os.system(cmd_line)
    return True

def report_connect_fail(self, ip, force_remove=False):
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if ip not in self.ip_dict:
            xlog.debug("report_connect_fail %s not exist", ip)
            return

        if force_remove:
            if self.ip_dict[ip]['fail_times'] == 0:
                self.good_ip_num -= 1
            del self.ip_dict[ip]

            if ip in self.gws_ip_list:
                self.gws_ip_list.remove(ip)

            xlog.info("remove ip:%s left amount:%d gws_num:%d", ip, len(self.ip_dict), len(self.gws_ip_list))
            return

        self.ip_dict[ip]['links'] -= 1

        # ignore if the system network is disconnected.
        if check_ip.network_stat == "Fail":
            xlog.debug("report_connect_fail network fail")
            return

        check_ip.continue_fail_count += 1
        if check_ip.continue_fail_count > 10:
            check_ip.network_stat = "unknown"
            xlog.debug("report_connect_fail continue_fail_count:%d", check_ip.continue_fail_count)
            check_ip.triger_check_network()
            return

        fail_time = self.ip_dict[ip]["fail_time"]
        if time_now - fail_time < 1:
            xlog.debug("fail time too near %s", ip)
            return

        if self.ip_dict[ip]['fail_times'] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip]['fail_times'] += 1
        self.append_ip_history(ip, "fail")
        self.ip_dict[ip]["fail_time"] = time_now

        check_ip.triger_check_network()
        self.to_check_ip_queue.put((ip, time_now + 10))
        xlog.info("report_connect_fail:%s", ip)
    except Exception as e:
        xlog.exception("report_connect_fail err:%s", e)
    finally:
        self.iplist_need_save = 1
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()

def report_connect_fail(self, ip_str, force_remove=False):
    self.ip_lock.acquire()
    try:
        time_now = time.time()
        if ip_str not in self.ip_dict:
            return

        self.ip_dict[ip_str]['links'] -= 1

        # ignore if the system network is disconnected.
        if not force_remove:
            if not check_ip.network_is_ok():
                xlog.debug("report_connect_fail network fail")
                #connect_control.fall_into_honeypot()
                return

        fail_time = self.ip_dict[ip_str]["fail_time"]
        if not force_remove and time_now - fail_time < 1:
            xlog.debug("fail time too near")
            return

        # increase handshake_time so this ip is picked with lower probability
        #self.ip_dict[ip_str]['handshake_time'] += 300

        if self.ip_dict[ip_str]['fail_times'] == 0:
            self.good_ip_num -= 1
        self.ip_dict[ip_str]['fail_times'] += 1
        self.append_ip_history(ip_str, "fail")
        self.ip_dict[ip_str]["fail_time"] = time_now

        if force_remove or self.ip_dict[ip_str]['fail_times'] >= 50:
            property = self.ip_dict[ip_str]
            server = property['server']
            del self.ip_dict[ip_str]

            if 'gws' in server and ip_str in self.gws_ip_list:
                self.gws_ip_list.remove(ip_str)

            if not force_remove:
                self.to_remove_ip_list.put(ip_str)
                self.try_remove_thread()
                xlog.info("remove ip tmp:%s left amount:%d gws_num:%d",
                          ip_str, len(self.ip_dict), len(self.gws_ip_list))
            else:
                xlog.info("remove ip:%s left amount:%d gws_num:%d",
                          ip_str, len(self.ip_dict), len(self.gws_ip_list))

            if self.good_ip_num > len(self.ip_dict):
                self.good_ip_num = len(self.ip_dict)

        self.iplist_need_save = 1
    except Exception as e:
        xlog.exception("set_ip err:%s", e)
    finally:
        self.ip_lock.release()

    if not self.is_ip_enough():
        self.search_more_google_ip()

def roll_log(self):
    for i in range(1000):
        file_name = os.path.join(config.DATA_PATH, "scan_ip.%d.log" % i)
        if os.path.isfile(file_name):
            continue

        xlog.info("scan_ip_log roll %s -> %s", self.log_path, file_name)
        shutil.move(self.log_path, file_name)
        return

def get_connect_interval():
    if sys.platform != "win32":
        return 0

    win_version = env_info.win32_version()
    if win_version == 10:
        xlog.info("detect Win10, enable connect concurrent control, interval:%d", config.connect_interval)
        return config.connect_interval

    return 0

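# A minimal sketch of how a caller might pace outgoing connections with
# get_connect_interval(). `_demo_paced_connect` and `create_connection` are
# illustrative names, not part of the source, and the interval is assumed
# to be in milliseconds.
def _demo_paced_connect(candidate_ips, create_connection):
    interval = get_connect_interval()
    for ip in candidate_ips:
        if interval:
            time.sleep(interval / 1000.0)  # assumed: interval is in ms
        create_connection(ip)
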
def generate_ca_file():
    xlog.info("generate CA file:%s", CertUtil.ca_keyfile)
    key, ca = CertUtil.create_ca()
    with open(CertUtil.ca_keyfile, 'wb') as fp:
        fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
        fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))

def req_deploy_handler(self):
    global deploy_proc
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''

    log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'upload.log'))
    time_now = datetime.datetime.today().strftime('%H:%M:%S-%a/%d/%b/%Y')

    if reqs['cmd'] == ['deploy']:
        appid = self.postvars['appid'][0]

        if deploy_proc and deploy_proc.poll() is None:
            xlog.warn("deploy is running, request denied.")
            data = '{"res":"deploy is running", "time":"%s"}' % (time_now)
        else:
            try:
                if os.path.isfile(log_path):
                    os.remove(log_path)
                script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'uploader.py'))
                email = self.postvars['email'][0]
                passwd = self.postvars['passwd'][0]
                rc4_passwd = self.postvars['rc4_passwd'][0]
                deploy_proc = subprocess.Popen([sys.executable, script_path, appid, email, passwd, rc4_passwd])
                xlog.info("deploy begin.")
                data = '{"res":"success", "time":"%s"}' % time_now
            except Exception as e:
                data = '{"res":"%s", "time":"%s"}' % (e, time_now)
    elif reqs['cmd'] == ['cancel']:
        if deploy_proc and deploy_proc.poll() is None:
            deploy_proc.kill()
            data = '{"res":"deploy is killed", "time":"%s"}' % (time_now)
        else:
            data = '{"res":"deploy is not running", "time":"%s"}' % (time_now)
    elif reqs['cmd'] == ['get_log']:
        if deploy_proc and os.path.isfile(log_path):
            with open(log_path, "r") as f:
                content = f.read()
        else:
            content = ""

        status = 'init'
        if deploy_proc:
            if deploy_proc.poll() is None:
                status = 'running'
            else:
                status = 'finished'

        data = json.dumps({'status': status, 'log': content, 'time': time_now})

    self.send_response('text/html', data)

def do_GET(self):
    xlog.info('PAC from:%s %s %s ', self.address_string(), self.command, self.path)

    path = urlparse.urlparse(self.path).path  # '/proxy.pac'
    filename = os.path.normpath('./' + path)  # proxy.pac

    if self.path.startswith(('http://', 'https://')):
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        if filename.endswith(('.jpg', '.gif', '.jpeg', '.bmp')):
            data += b'Content-Type: image/gif\r\n\r\n' + self.onepixel
        else:
            data += b'\r\n This is the Pac server, not proxy port, use 8087 as proxy port.'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
        return

    # check for '..', which will leak file
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s hacking', self.address_string(), self.command, self.path)
        return

    if filename != 'proxy.pac':
        xlog.warn("pac_server GET %s fail", filename)
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        return

    mimetype = 'text/plain'
    if self.path.endswith('.pac?flush') or time.time() - os.path.getmtime(get_serving_pacfile()) > config.PAC_EXPIRED:
        thread.start_new_thread(PacUtil.update_pacfile, (user_pacfile,))

    pac_filename = get_serving_pacfile()
    with open(pac_filename, 'rb') as fp:
        data = fp.read()

    host = self.headers.getheader('Host')
    host, _, port = host.rpartition(":")
    gae_proxy_proxy = host + ":" + str(config.LISTEN_PORT)
    pac_proxy = host + ":" + str(config.PAC_PORT)

    data = data.replace(gae_proxy_listen, gae_proxy_proxy)
    data = data.replace(pac_listen, pac_proxy)

    self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % (mimetype, len(data))).encode())
    self.wfile.write(data)

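# A quick manual test for the PAC handler above (hypothetical helper; the
# default port is an example value, use whatever config.PAC_PORT is locally):
def _demo_fetch_pac(port=8086):
    import urllib2
    return urllib2.urlopen("http://127.0.0.1:%d/proxy.pac" % port).read()
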
def load_range_content(self):
    if os.path.isfile(user_range_file):
        self.range_file = user_range_file
    else:
        self.range_file = default_range_file

    xlog.info("load ip range file:%s", self.range_file)
    fd = open(self.range_file, "r")
    if not fd:
        xlog.error("load ip range %s fail", self.range_file)
        return

    content = fd.read()
    fd.close()
    return content

def do_CONNECT_FWD(self):
    """socket forward for http CONNECT command"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    xlog.info('FWD %s %s:%d ', self.command, host, port)
    if host == "appengine.google.com" or host == "www.google.com":
        connected_in_s = 5  # gae_proxy upload to appengine is slow, it needs a fresher connection.
    else:
        connected_in_s = 10  # gws connect can be used within 15 s after the tcp connection is created

    try:
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')
        data = self.connection.recv(1024)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        return

    remote = forwork_manager.create_connection(host=host, port=port, sock_life=connected_in_s)
    if remote is None:
        self.connection.close()
        xlog.warn('FWD %s %s:%d create_connection fail', self.command, host, port)
        return

    try:
        if data:
            remote.send(data)
    except Exception as e:
        xlog.exception('do_CONNECT_FWD (%r, %r) Exception:%s', host, port, e)
        self.connection.close()
        remote.close()
        return

    # reset timeout to the default to avoid long http upload failure, but it will delay timeout retry :(
    remote.settimeout(None)

    forwork_manager.forward_socket(self.connection, remote, bufsize=self.bufsize)
    xlog.debug('FWD %s %s:%d closed', self.command, host, port)

def init_ca():
    # check the certs dir
    if not os.path.exists(CertUtil.ca_certdir):
        os.makedirs(CertUtil.ca_certdir)

    # confirm the GoAgent CA exists
    if not os.path.exists(CertUtil.ca_keyfile):
        xlog.info("no CA file exist")

        xlog.info("clean old site certs")
        any(os.remove(x) for x in glob.glob(CertUtil.ca_certdir + '/*.crt') + glob.glob(CertUtil.ca_certdir + '/.*.crt'))

        if os.name == 'nt':
            CertUtil.remove_windows_ca('%s CA' % CertUtil.ca_vendor)

        CertUtil.generate_ca_file()

    # load the GoAgent CA
    with open(CertUtil.ca_keyfile, 'rb') as fp:
        CertUtil.ca_thumbprint = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, fp.read()).digest('sha1')

    # check the existing site cert buffer against the CA
    certfiles = glob.glob(CertUtil.ca_certdir + '/*.crt') + glob.glob(CertUtil.ca_certdir + '/.*.crt')
    if certfiles:
        filename = random.choice(certfiles)
        commonname = os.path.splitext(os.path.basename(filename))[0]
        with open(filename, 'rb') as fp:
            serial_number = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, fp.read()).get_serial_number()
        if serial_number != CertUtil.get_cert_serial_number(commonname):
            any(os.remove(x) for x in certfiles)

    CertUtil.import_ca(CertUtil.ca_keyfile)

    # change the status:
    # web_control /cert_import_status will return True, else return False;
    # the launcher waits for ready before opening the browser and checking updates
    config.cert_import_ready = True

def adjust_scan_thread_num(self):
    if not self.auto_adjust_scan_ip_thread_num:
        scan_ip_thread_num = self.max_scan_ip_thread_num
    elif len(self.gws_ip_list) < 100:
        scan_ip_thread_num = self.max_scan_ip_thread_num
    else:
        try:
            the_100th_ip = self.gws_ip_list[99]
            the_100th_handshake_time = self.ip_dict[the_100th_ip]['handshake_time']
            scan_ip_thread_num = int((the_100th_handshake_time - 200) / 2 * self.max_scan_ip_thread_num / 50)
        except Exception as e:
            xlog.warn("adjust_scan_thread_num fail:%r", e)
            return

    if scan_ip_thread_num > self.max_scan_ip_thread_num:
        scan_ip_thread_num = self.max_scan_ip_thread_num

    if scan_ip_thread_num != self.scan_ip_thread_num:
        xlog.info("Adjust scan thread num from %d to %d", self.scan_ip_thread_num, scan_ip_thread_num)
        self.scan_ip_thread_num = scan_ip_thread_num
        self.search_more_google_ip()

def handler(method, url, headers, body, wfile):
    time_request = time.time()
    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "get_timeout")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))

        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, response.status, url)

                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s",
                              (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)

def update_pacfile(filename):
    opener = get_opener()

    listen_ip = config.LISTEN_IP
    autoproxy = gae_proxy_listen
    blackhole = pac_listen
    default = 'DIRECT'

    if config.PAC_ADBLOCK:
        try:
            xlog.info('try download %r to update_pacfile(%r)', config.PAC_ADBLOCK, filename)
            adblock_content = opener.open(config.PAC_ADBLOCK).read()
        except Exception as e:
            xlog.warn("pac_update download adblock fail:%r", e)
            return

    try:
        xlog.info('try download %r to update_pacfile(%r)', config.PAC_GFWLIST, filename)
        pac_content = opener.open(config.PAC_GFWLIST).read()
    except Exception as e:
        xlog.warn("pac_update download gfwlist fail:%r", e)
        return

    content = ''
    need_update = True

    with open(get_serving_pacfile(), 'rb') as fp:
        content = fp.read()

    try:
        placeholder = '// AUTO-GENERATED RULES, DO NOT MODIFY!'
        content = content[:content.index(placeholder) + len(placeholder)]
        content = re.sub(r'''blackhole\s*=\s*['"]PROXY [\.\w:]+['"]''', 'blackhole = \'PROXY %s\'' % blackhole, content)
        content = re.sub(r'''autoproxy\s*=\s*['"]PROXY [\.\w:]+['"]''', 'autoproxy = \'PROXY %s\'' % autoproxy, content)
        if content.startswith('//'):
            line = '// Proxy Auto-Config file generated by autoproxy2pac, %s\r\n' % time.strftime('%Y-%m-%d %H:%M:%S')
            content = line + '\r\n'.join(content.splitlines()[1:])
    except ValueError:
        need_update = False

    try:
        if config.PAC_ADBLOCK:
            xlog.info('%r downloaded, try convert it with adblock2pac', config.PAC_ADBLOCK)
            jsrule = PacUtil.adblock2pac(adblock_content, 'FindProxyForURLByAdblock', blackhole, default)
            content += '\r\n' + jsrule + '\r\n'
            xlog.info('%r downloaded and parsed', config.PAC_ADBLOCK)
        else:
            content += '\r\nfunction FindProxyForURLByAdblock(url, host) {return "DIRECT";}\r\n'
    except Exception as e:
        need_update = False
        xlog.exception('update_pacfile failed: %r', e)
        return

    try:
        autoproxy_content = base64.b64decode(pac_content)
        xlog.info('%r downloaded, try convert it with autoproxy2pac', config.PAC_GFWLIST)
        jsrule = PacUtil.autoproxy2pac(autoproxy_content, 'FindProxyForURLByAutoProxy', autoproxy, default)
        content += '\r\n' + jsrule + '\r\n'
        xlog.info('%r downloaded and parsed', config.PAC_GFWLIST)
    except Exception as e:
        need_update = False
        xlog.exception('update_pacfile failed: %r', e)
        return

    if need_update:
        with open(user_pacfile, 'wb') as fp:
            fp.write(content)
        xlog.info('%r successfully updated', user_pacfile)
        serving_pacfile = user_pacfile

def do_CONNECT_DIRECT(self):
    """deploy fake cert to client"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        xlog.warn("CONNECT %s port:%d not support", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s',
                           self.connection, e, self.path, e.args[0])
        return

    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT Direct %s %s', self.command, self.path)

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None

def handler(method, host, url, headers, body, wfile):
    time_request = time.time()

    if "Connection" in headers and headers["Connection"] == "close":
        del headers["Connection"]

    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            return return_fail_message(wfile)

        try:
            response = fetch(method, host, url, headers, body)
            if response:
                if response.status > 400:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s status:%d",
                                  response.ssl_sock.ip, server_type, response.status)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                break
        except OpenSSL.SSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)

    try:
        send_to_browser = True
        try:
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            wait_time = time.time() - time_request
            xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s%s", wait_time, e, host, url)

        if method == 'HEAD' or response.status in (204, 304):
            xlog.info("DIRECT t:%d %d %s %s", (time.time() - time_request) * 1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return

        if 'Transfer-Encoding' in response_headers:
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead as e:
                    data = e.partial
                except Exception as e:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
                    xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s",
                              time.time() - time_request, e, host, url)
                    response.close()
                    return

                if send_to_browser:
                    try:
                        if not data:
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s",
                                  time.time() - time_request, e, host, url)
                else:
                    if not data:
                        break
    except Exception as e:
        # assumption: the source snippet is truncated here; this generic close
        # of the outer try block is a reconstruction, not the original handling
        xlog.exception("direct_handler.handler %r %s/%s", e, host, url)

def __fetchlet(self, range_queue, data_queue, range_delay_size):
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers['Connection'] = 'close'
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers['Range'] = 'bytes=%d-%d' % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                range_queue.put((start, end, None))
                continue

            if response.app_status != 200:
                xlog.warning('Range Fetch return %s "%s %s" %s ',
                             response.app_status, self.method, self.url, headers['Range'])

                if response.app_status == 404:
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return

                if response.app_status == 503:
                    xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return

                google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader('Location'):
                self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                xlog.info('RangeFetch Redirect(%r)', self.url)
                google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader('Content-Range')
                if not content_range:
                    xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                                 self.method, self.url, content_range, response.getheaders(), start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                content_length = int(response.getheader('Content-Length', 0))
                xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s',
                          threading.currentThread().ident, content_length, content_range)

                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue

                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s',
                                     self.method, self.url, headers['Range'], e)
                        break

                if start < end + 1:
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
            else:
                xlog.error('RangeFetch %r return %s', self.url, response.status)
                google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            xlog.exception('RangeFetch._fetchlet error:%s', e)
            raise

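# For reference, the Content-Range header parsed above has the standard HTTP
# form "bytes <first>-<last>/<total>"; e.g. a 100-byte slice of a 1234-byte
# resource is reported as:
#
#   Content-Range: bytes 0-99/1234
#
# and the corresponding request header set in __fetchlet is "Range: bytes=0-99".
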
def handler(method, url, headers, body, wfile):
    time_request = time.time()
    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "get_timeout")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))

        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time() - time_request, e, url)

        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length,
                              response.ssl_sock.handshake_time, response.status, url)

                response.ssl_sock.received_size += body_length
                google_ip.report_ip_traffic(response.ssl_sock.ip, body_length)
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s",
                              (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)

def do_CONNECT_DIRECT(self):
    """deploy fake cert to client"""
    host, _, port = self.path.rpartition(':')
    port = int(port)
    if port != 443:
        xlog.warn("CONNECT %s port:%d not supported", host, port)
        return

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s',
                           self.connection, e, self.path, e.args[0])
        return

    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            self.close_connection = 1
            return
        if not self.parse_request():
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)
        if len(self.parsed_url[4]):
            path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
        else:
            path = self.parsed_url[2]

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return

        direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None

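# Quick illustration (not part of the proxy) of the CONNECT target split
# used above: rpartition(':') splits on the last colon, so any colons in
# the host part survive (bracketed IPv6 literals are not special-cased).
host, _, port = "www.example.com:443".rpartition(':')
assert (host, port) == ("www.example.com", "443")
host6, _, port6 = "[2001:db8::1]:443".rpartition(':')
assert (host6, port6) == ("[2001:db8::1]", "443")
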
def do_CONNECT_AGENT(self):
    """deploy fake cert to client"""
    # GAE supports the following HTTP methods: GET, POST, HEAD, PUT, DELETE, and PATCH
    host, _, port = self.path.rpartition(':')
    port = int(port)

    certfile = CertUtil.get_cert(host)
    xlog.info('GAE %s %s:%d ', self.command, host, port)
    self.__realconnection = None
    self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

    try:
        ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
    except ssl.SSLError as e:
        xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
        certfile = CertUtil.get_cert(host, full_name=True)
        return
    except Exception as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s',
                           self.connection, e, self.path, e.args[0])
        return

    self.__realconnection = self.connection
    self.__realwfile = self.wfile
    self.__realrfile = self.rfile
    self.connection = ssl_sock
    self.rfile = self.connection.makefile('rb', self.bufsize)
    self.wfile = self.connection.makefile('wb', 0)

    try:
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            xlog.warn("read request line len:%d", len(self.raw_requestline))
            return
        if not self.raw_requestline:
            xlog.warn("read request line empty")
            return
        if not self.parse_request():
            xlog.warn("parse request fail:%s", self.raw_requestline)
            return
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
            xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s',
                           self.connection, e, self.path, e.args[0])
            raise

    if self.path[0] == '/' and host:
        self.path = 'https://%s%s' % (self.headers['Host'], self.path)

    if self.path == "https://www.twitter.com/xxnet":
        # for web_ui status page:
        # auto-detect whether the browser proxy setting works
        xlog.debug("CONNECT %s %s", self.command, self.path)
        return self.wfile.write(self.self_check_response_data)

    xlog.debug('GAE CONNECT %s %s', self.command, self.path)
    if self.command not in self.gae_support_methods:
        if host.endswith(".google.com") or host.endswith(config.HOSTS_DIRECT_ENDSWITH) or host.endswith(config.HOSTS_GAE_ENDSWITH):
            if host in config.HOSTS_GAE:
                gae_set = [s for s in config.HOSTS_GAE]
                gae_set.remove(host)
                config.HOSTS_GAE = tuple(gae_set)
            if host not in config.HOSTS_DIRECT:
                fwd_set = [s for s in config.HOSTS_DIRECT]
                fwd_set.append(host)
                config.HOSTS_DIRECT = tuple(fwd_set)
            xlog.warn("Method %s not supported by GAE, redirect to DIRECT for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 301\r\nLocation: %s\r\n\r\n' % self.path).encode())
        else:
            xlog.warn("Method %s not supported by GAEProxy for %s", self.command, self.path)
            return self.wfile.write(('HTTP/1.1 404 Not Found\r\n\r\n').encode())

    try:
        if self.path[0] == '/' and host:
            self.path = 'http://%s%s' % (host, self.path)
        elif not host and '://' in self.path:
            host = urlparse.urlparse(self.path).netloc

        self.parsed_url = urlparse.urlparse(self.path)

        return self.do_AGENT()
    except NetWorkIOError as e:
        if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
            raise
    finally:
        if self.__realconnection:
            try:
                self.__realconnection.shutdown(socket.SHUT_WR)
                self.__realconnection.close()
            except NetWorkIOError:
                pass
            finally:
                self.__realconnection = None

def req_deploy_handler(self):
    global deploy_proc
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''

    log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'upload.log'))
    time_now = datetime.datetime.today().strftime('%H:%M:%S-%a/%d/%b/%Y')

    if reqs['cmd'] == ['deploy']:
        appid = self.postvars['appid'][0]

        if deploy_proc and deploy_proc.poll() is None:
            xlog.warn("deploy is running, request denied.")
            data = '{"res":"deploy is running", "time":"%s"}' % time_now
        else:
            try:
                if os.path.isfile(log_path):
                    os.remove(log_path)
                script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'uploader.py'))
                email = self.postvars['email'][0]
                passwd = self.postvars['passwd'][0]
                rc4_passwd = self.postvars['rc4_passwd'][0]
                deploy_proc = subprocess.Popen([sys.executable, script_path, appid, email, passwd, rc4_passwd])
                xlog.info("deploy begin.")
                data = '{"res":"success", "time":"%s"}' % time_now
            except Exception as e:
                data = '{"res":"%s", "time":"%s"}' % (e, time_now)

    elif reqs['cmd'] == ['cancel']:
        if deploy_proc and deploy_proc.poll() is None:
            deploy_proc.kill()
            data = '{"res":"deploy is killed", "time":"%s"}' % time_now
        else:
            data = '{"res":"deploy is not running", "time":"%s"}' % time_now

    elif reqs['cmd'] == ['get_log']:
        if deploy_proc and os.path.isfile(log_path):
            with open(log_path, "r") as f:
                content = f.read()
        else:
            content = ""

        status = 'init'
        if deploy_proc:
            if deploy_proc.poll() is None:
                status = 'running'
            else:
                status = 'finished'

        data = json.dumps({'status': status, 'log': content, 'time': time_now})

    self.send_response('text/html', data)

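# Design note (a sketch, not the shipped code): the deploy/cancel branches
# above build JSON by string interpolation, so an exception message that
# contains a double quote would yield invalid JSON. json.dumps, already
# used in the get_log branch, escapes such values correctly:
import json
payload_json = json.dumps({"res": "deploy is running", "time": "12:00:00-Mon/01/Jan/2000"})
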
def fetch(self):
    response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
    content_range = response_headers['Content-Range']
    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
    if start == 0:
        response_headers['Content-Length'] = str(length)
        del response_headers['Content-Range']
    else:
        response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end, length)
        response_headers['Content-Length'] = str(length - start)

    xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url, start, end)

    try:
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        for key in response_headers:
            if key == 'Transfer-Encoding':
                continue
            if key == 'X-Head-Content-Length':
                continue
            if key in skip_headers:
                continue
            value = response_headers[key]
            #logging.debug("Head %s: %s", key.title(), value)
            send_header(self.wfile, key, value)
        self.wfile.write("\r\n")
    except Exception as e:
        self._stopped = True
        xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
        return

    data_queue = Queue.PriorityQueue()
    range_queue = Queue.PriorityQueue()
    range_queue.put((start, end, self.response))
    self.expect_begin = start
    for begin in range(end + 1, length, self.maxsize):
        range_queue.put((begin, min(begin + self.maxsize - 1, length - 1), None))
    for i in xrange(0, self.threads):
        range_delay_size = i * self.maxsize
        spawn_later(float(range_delay_size) / self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size)

    has_peek = hasattr(data_queue, 'peek')
    peek_timeout = 120
    while self.expect_begin < length - 1:
        try:
            if has_peek:
                begin, data = data_queue.peek(timeout=peek_timeout)
                if self.expect_begin == begin:
                    data_queue.get()
                elif self.expect_begin < begin:
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
            else:
                begin, data = data_queue.get(timeout=peek_timeout)
                if self.expect_begin == begin:
                    pass
                elif self.expect_begin < begin:
                    data_queue.put((begin, data))
                    time.sleep(0.1)
                    continue
                else:
                    xlog.error('RangeFetch Error: begin(%r) < expect_begin(%r), quit.', begin, self.expect_begin)
                    break
        except Queue.Empty:
            xlog.error('data_queue peek timeout, break')
            break

        try:
            ret = self.wfile.write(data)
            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                ret = self.wfile.write(data)
                xlog.debug("send to browser wfile.write ret:%d", ret)
            self.expect_begin += len(data)
            del data
        except Exception as e:
            xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
            break

    self._stopped = True

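# A standalone sketch (hypothetical helper) of how fetch() above carves the
# remainder of the file into fixed-size tasks for the worker threads: the
# first task covers [start, end] from the initial response, then maxsize
# slices follow until the total length is reached.
def _split_ranges(first_end, length, maxsize):
    for begin in range(first_end + 1, length, maxsize):
        yield begin, min(begin + maxsize - 1, length - 1)

# e.g. list(_split_ranges(1048575, 3 * 1048576 + 100, 1048576)) ->
# [(1048576, 2097151), (2097152, 3145727), (3145728, 3145827)]
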
def do_GET(self):
    path = urlparse.urlparse(self.path).path
    if path == "/log":
        return self.req_log_handler()
    elif path == "/status":
        return self.req_status_handler()
    else:
        xlog.debug('GAEProxy Web_control %s %s %s ', self.address_string(), self.command, self.path)

    if path == '/deploy':
        return self.req_deploy_handler()
    elif path == "/config":
        return self.req_config_handler()
    elif path == "/ip_list":
        return self.req_ip_list_handler()
    elif path == "/scan_ip":
        return self.req_scan_ip_handler()
    elif path == "/ssl_pool":
        return self.req_ssl_pool_handler()
    elif path == "/download_cert":
        return self.req_download_cert_handler()
    elif path == "/is_ready":
        return self.req_is_ready_handler()
    elif path == "/test_ip":
        return self.req_test_ip_handler()
    elif path == "/check_ip":
        return self.req_check_ip_handler()
    elif path == "/quit":
        connect_control.keep_running = False
        data = "Quit"
        self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
        self.wfile.write(data)
        #sys.exit(0)
        #quit()
        #os._exit(0)
        return
    elif path.startswith("/wizard/"):
        file_path = os.path.abspath(os.path.join(web_ui_path, '/'.join(path.split('/')[1:])))
        if not os.path.isfile(file_path):
            self.wfile.write(b'HTTP/1.1 404 Not Found\r\n\r\n')
            xlog.warn('%s %s %s wizard file %s not found', self.address_string(), self.command, self.path, file_path)
            return

        if file_path.endswith('.html'):
            mimetype = 'text/html'
        elif file_path.endswith('.png'):
            mimetype = 'image/png'
        elif file_path.endswith('.jpg') or file_path.endswith('.jpeg'):
            mimetype = 'image/jpeg'
        else:
            mimetype = 'application/octet-stream'

        self.send_file(file_path, mimetype)
        return
    else:
        xlog.warn('Control Req %s %s %s ', self.address_string(), self.command, self.path)

    # reject '..', which would leak files outside the web root
    if re.search(r'(\.{2})', self.path) is not None:
        self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
        xlog.warn('%s %s %s hacking attempt', self.address_string(), self.command, self.path)
        return

    filename = os.path.normpath('./' + path)
    if self.path.startswith(('http://', 'https://')):
        data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires: Sun, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
        data += b'\r\n'
        self.wfile.write(data)
        xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
    elif os.path.isfile(filename):
        if filename.endswith('.pac'):
            mimetype = 'text/plain'
        else:
            mimetype = 'application/octet-stream'
        #self.send_file(filename, mimetype)
    else:
        self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
        xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)

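# Sketch of an alternative (hypothetical, behavior-equivalent) to the
# suffix chain in the wizard branch above: a lookup table keeps the
# suffix -> mimetype mapping in one place.
_WIZARD_MIMETYPES = {
    '.html': 'text/html',
    '.png': 'image/png',
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
}

def _guess_mimetype(file_path):
    import os
    _, ext = os.path.splitext(file_path)
    return _WIZARD_MIMETYPES.get(ext.lower(), 'application/octet-stream')
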
def load(self):
    """load config from proxy.ini"""
    current_path = os.path.dirname(os.path.abspath(__file__))
    ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
    self.CONFIG = ConfigParser.ConfigParser()
    self.CONFIG_FILENAME = os.path.abspath(os.path.join(current_path, 'proxy.ini'))

    self.DATA_PATH = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir, 'data', 'gae_proxy'))
    if not os.path.isdir(self.DATA_PATH):
        self.DATA_PATH = current_path

    # load ../../../data/gae_proxy/config.ini, set by web_ui
    self.CONFIG_USER_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'config.ini'))

    self.CONFIG.read(self.CONFIG_FILENAME)
    if os.path.isfile(self.CONFIG_USER_FILENAME):
        with open(self.CONFIG_USER_FILENAME, 'rb') as fp:
            content = fp.read()
        self.CONFIG.readfp(io.BytesIO(content))

    # load ../../../data/gae_proxy/manual.ini, set by hand
    self.CONFIG_MANUAL_FILENAME = os.path.abspath(os.path.join(self.DATA_PATH, 'manual.ini'))
    if os.path.isfile(self.CONFIG_MANUAL_FILENAME):
        with open(self.CONFIG_MANUAL_FILENAME, 'rb') as fp:
            content = fp.read()
        try:
            self.CONFIG.readfp(io.BytesIO(content))
            xlog.info("load manual.ini success")
        except Exception as e:
            xlog.exception("data/gae_proxy/manual.ini load error:%s", e)

    self.LISTEN_IP = self.CONFIG.get('listen', 'ip')
    self.LISTEN_PORT = self.CONFIG.getint('listen', 'port')
    self.LISTEN_VISIBLE = self.CONFIG.getint('listen', 'visible')
    self.LISTEN_DEBUGINFO = self.CONFIG.getint('listen', 'debuginfo')

    self.GAE_APPIDS = re.findall(r'[\w\-\.]+', self.CONFIG.get('gae', 'appid').replace('.appspot.com', ''))
    self.GAE_PASSWORD = self.CONFIG.get('gae', 'password').strip()

    fwd_endswith = []
    fwd_hosts = []
    direct_endswith = []
    direct_hosts = []
    gae_endswith = []
    gae_hosts = []
    for k, v in self.CONFIG.items('hosts'):
        if v == "fwd":
            if k.startswith('.'):
                fwd_endswith.append(k)
            else:
                fwd_hosts.append(k)
        elif v == "direct":
            if k.startswith('.'):
                direct_endswith.append(k)
            else:
                direct_hosts.append(k)
        elif v == "gae":
            if k.startswith('.'):
                gae_endswith.append(k)
            else:
                gae_hosts.append(k)
    self.HOSTS_FWD_ENDSWITH = tuple(fwd_endswith)
    self.HOSTS_FWD = tuple(fwd_hosts)
    self.HOSTS_GAE_ENDSWITH = tuple(gae_endswith)
    self.HOSTS_GAE = tuple(gae_hosts)
    self.HOSTS_DIRECT_ENDSWITH = tuple(direct_endswith)
    self.HOSTS_DIRECT = tuple(direct_hosts)

    self.AUTORANGE_MAXSIZE = self.CONFIG.getint('autorange', 'maxsize')
    self.AUTORANGE_WAITSIZE = self.CONFIG.getint('autorange', 'waitsize')
    self.AUTORANGE_BUFSIZE = self.CONFIG.getint('autorange', 'bufsize')
    self.AUTORANGE_THREADS = self.CONFIG.getint('autorange', 'threads')

    self.PAC_ENABLE = self.CONFIG.getint('pac', 'enable')
    self.PAC_IP = self.CONFIG.get('pac', 'ip')
    self.PAC_PORT = self.CONFIG.getint('pac', 'port')
    self.PAC_FILE = self.CONFIG.get('pac', 'file').lstrip('/')
    self.PAC_GFWLIST = self.CONFIG.get('pac', 'gfwlist')
    self.PAC_ADBLOCK = self.CONFIG.get('pac', 'adblock') if self.CONFIG.has_option('pac', 'adblock') else ''
    self.PAC_EXPIRED = self.CONFIG.getint('pac', 'expired')
    self.pac_url = 'http://%s:%d/%s\n' % (self.PAC_IP, self.PAC_PORT, self.PAC_FILE)

    self.CONTROL_ENABLE = self.CONFIG.getint('control', 'enable')
    self.CONTROL_IP = self.CONFIG.get('control', 'ip')
    self.CONTROL_PORT = self.CONFIG.getint('control', 'port')

    self.PROXY_ENABLE = self.CONFIG.getint('proxy', 'enable')
    self.PROXY_TYPE = self.CONFIG.get('proxy', 'type')
    self.PROXY_HOST = self.CONFIG.get('proxy', 'host')
    self.PROXY_PORT = self.CONFIG.get('proxy', 'port')
    if self.PROXY_PORT == "":
        self.PROXY_PORT = 0
    else:
        self.PROXY_PORT = int(self.PROXY_PORT)
    self.PROXY_USER = self.CONFIG.get('proxy', 'user')
    self.PROXY_PASSWD = self.CONFIG.get('proxy', 'passwd')

    self.LOVE_ENABLE = self.CONFIG.getint('love', 'enable')
    self.LOVE_TIP = self.CONFIG.get('love', 'tip').encode('utf8').decode('unicode-escape').split('|')

    self.USE_IPV6 = self.CONFIG.getint('google_ip', 'use_ipv6')
    self.ip_traffic_quota = self.CONFIG.getint('google_ip', 'ip_traffic_quota')
    self.ip_traffic_quota_base = self.CONFIG.getint('google_ip', 'ip_traffic_quota_base')
    self.max_links_per_ip = self.CONFIG.getint('google_ip', 'max_links_per_ip')
    self.record_ip_history = self.CONFIG.getint('google_ip', 'record_ip_history')

    self.https_max_connect_thread = self.CONFIG.getint("connect_manager", "https_max_connect_thread")
    self.connect_interval = self.CONFIG.getint("connect_manager", "connect_interval")

    # Set to True once the CA cert has been imported into the browser;
    # the launcher waits for this before opening the browser to show
    # status, check for updates, etc.
    self.cert_import_ready = False

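# Illustrative proxy.ini skeleton covering the sections load() reads above
# (placeholder values, not shipped defaults):
#
#   [listen]
#   ip = 127.0.0.1
#   port = 8087
#   visible = 1
#   debuginfo = 0
#
#   [gae]
#   appid = your-appid
#   password =
#
#   [hosts]
#   .google.com = gae
#   www.example.com = direct
#
#   [autorange]
#   maxsize = 1048576
#   waitsize = 524288
#   bufsize = 8192
#   threads = 2
#
#   [pac]
#   enable = 1
#   ip = 127.0.0.1
#   port = 8086
#   file = proxy.pac
#   gfwlist =
#   expired = 86400
#
#   [control]
#   enable = 1
#   ip = 127.0.0.1
#   port = 8085
#
#   [proxy]
#   enable = 0
#   type = http
#   host =
#   port =
#   user =
#   passwd =
#
#   [love]
#   enable = 1
#   tip =
#
#   [google_ip]
#   use_ipv6 = 0
#   ip_traffic_quota = 0
#   ip_traffic_quota_base = 0
#   max_links_per_ip = 100
#   record_ip_history = 0
#
#   [connect_manager]
#   https_max_connect_thread = 10
#   connect_interval = 10
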
def handler(method, host, url, headers, body, wfile):
    time_request = time.time()

    if "Connection" in headers and headers["Connection"] == "close":
        del headers["Connection"]

    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            return return_fail_message(wfile)

        try:
            response = fetch(method, host, url, headers, body)
            if response:
                if response.status > 400:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s status:%d",
                                  response.ssl_sock.ip, server_type, response.status)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                break
        except OpenSSL.SSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)

    try:
        send_to_browser = True
        try:
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            wait_time = time.time() - time_request
            xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s/%s", wait_time, e, host, url)

        if method == 'HEAD' or response.status in (204, 304):
            xlog.info("DIRECT t:%d %d %s %s", (time.time() - time_request) * 1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return

        if 'Transfer-Encoding' in response_headers:
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead as e:
                    data = e.partial
                except Exception as e:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
                    xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s",
                              time.time() - time_request, e, host, url)
                    response.close()
                    return

                if send_to_browser:
                    try:
                        if not data:
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s",
                                  time.time() - time_request, e, host, url)
                else:
                    if not data:
                        break

            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            xlog.info("DIRECT chunked t:%d s:%d %d %s %s",
                      (time.time() - time_request) * 1000, length, response.status, host, url)
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        time_last_read = time.time()
        while True:
            if start > end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host, call_time=time_request)
                xlog.info("DIRECT t:%d s:%d %d %s %s",
                          (time.time() - time_request) * 1000, length, response.status, host, url)
                return

            # (loop body below is an assumption: the dump truncates the
            # original here; reconstructed by analogy with the chunked path
            # above and gae_handler's read loop)
            data = response.read(8192)
            if not data:
                if time.time() - time_last_read > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s/%s",
                              (time.time() - time_request) * 1000, length, (end - start), host, url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            time_last_read = time.time()
            start += len(data)
            if send_to_browser:
                try:
                    wfile.write(data)
                except Exception as e:
                    send_to_browser = False
                    xlog.warn("direct_handler send to browser err:%r %s/%s", e, host, url)
    except Exception as e:
        # (tail is an assumption: the dump truncates the original here)
        xlog.exception("direct_handler except:%r %s/%s", e, host, url)

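# Minimal sketch (hypothetical helper) of the HTTP/1.1 chunked framing the
# Transfer-Encoding branch above emits: each chunk is
# "<hex length>\r\n<data>\r\n", and a zero-length chunk terminates the body.
def _write_chunk(wfile, data):
    if not data:
        wfile.write('0\r\n\r\n')  # final chunk, ends the chunked body
    else:
        wfile.write('%x\r\n' % len(data))
        wfile.write(data)
        wfile.write('\r\n')
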
def generate_ca_file():
    xlog.info("generate CA file:%s", CertUtil.ca_keyfile)
    key, ca = CertUtil.create_ca()
    with open(CertUtil.ca_keyfile, 'wb') as fp:
        fp.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
        fp.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
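
# Sketch (hypothetical helper, not part of CertUtil): the bundle written by
# generate_ca_file keeps certificate and key in one PEM file; pyOpenSSL's
# loaders scan past non-matching PEM blocks, so both should be readable
# back from the same buffer.
import OpenSSL

def _load_ca_bundle(path):
    with open(path, 'rb') as fp:
        pem = fp.read()
    ca = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem)
    key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, pem)
    return key, ca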