def parse_range_string(input_lines):
    ip_range_list = []

    ip_lines_list = re.split("\r|\n", input_lines)
    for raw_line in ip_lines_list:
        raw_s = raw_line.split("#")
        context_line = raw_s[0]
        context_line = context_line.replace(' ', '')

        ips = re.split(r",|\|", context_line)
        for line in ips:
            if len(line) == 0:
                #print "non line:", line
                continue

            begin, end = ip_utils.split_ip(line)
            if ip_utils.check_ip_valid(begin) == 0 or ip_utils.check_ip_valid(end) == 0:
                PRINT("ip format is error,line:%s, begin: %s,end: %s" % (line, begin, end))
                continue

            nbegin = ip_utils.ip_string_to_num(begin)
            nend = ip_utils.ip_string_to_num(end)
            ip_range_list.append([nbegin, nend])
            #print begin, end

    ip_range_list.sort()
    return ip_range_list

def parse_range_string(input_lines):
    ip_range_list = []

    ip_lines_list = re.split("\r|\n", input_lines)
    for raw_line in ip_lines_list:
        raw_s = raw_line.split("#")
        context_line = raw_s[0]
        context_line = context_line.replace(' ', '')

        ips = re.split(r",|\|", context_line)
        for line in ips:
            if len(line) == 0:
                #print "non line:", line
                continue

            begin, end = ip_utils.split_ip(line)
            if ip_utils.check_ip_valid(begin) == 0 or ip_utils.check_ip_valid(end) == 0:
                print("ip format is error,line:%s, begin: %s,end: %s" % (line, begin, end))
                continue

            nbegin = ip_utils.ip_string_to_num(begin)
            nend = ip_utils.ip_string_to_num(end)
            ip_range_list.append([nbegin, nend])
            #print begin, end

    ip_range_list.sort()
    return ip_range_list

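# Both parse_range_string variants above lean on a small ip_utils module.
# Below is a minimal sketch of what those helpers are assumed to do, inferred
# from the call sites only; the real ip_utils implementations may differ
# (for example, split_ip may also accept CIDR or masked forms).
import socket
import struct


def check_ip_valid(ip):
    """Return 1 if ip is a well-formed IPv4 dotted quad, else 0."""
    try:
        socket.inet_aton(ip)
        return 1 if ip.count('.') == 3 else 0
    except (socket.error, TypeError):
        return 0


def split_ip(line):
    """Split "a.b.c.d-e.f.g.h" into (begin, end); a single IP maps to itself."""
    if '-' in line:
        begin, _, end = line.partition('-')
        return begin, end
    return line, line


def ip_string_to_num(ip):
    """Dotted quad -> 32-bit integer."""
    return struct.unpack(">I", socket.inet_aton(ip))[0]


def ip_num_to_string(num):
    """32-bit integer -> dotted quad."""
    return socket.inet_ntoa(struct.pack(">I", num))
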
def _write_SOCKS5_address(self, addr, file):
    """
    Return the host and port packed for the SOCKS5 protocol,
    and the resolved address as a tuple object.
    """
    host, port = addr
    proxy_type, _, _, rdns, username, password = self.proxy

    if ":" in host:
        addr_bytes = socket.inet_pton(socket.AF_INET6, host)
        file.write(b"\x04" + addr_bytes)
    elif check_ip_valid(host):
        addr_bytes = socket.inet_pton(socket.AF_INET, host)
        file.write(b"\x01" + addr_bytes)
    else:
        if rdns:
            # Resolve remotely
            host_bytes = host.encode('idna')
            file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes)
        else:
            # Resolve locally
            addr_bytes = socket.inet_aton(socket.gethostbyname(host))
            file.write(b"\x01" + addr_bytes)
            host = socket.inet_ntoa(addr_bytes)

    file.write(struct.pack(">H", port))
    return host, port

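# The address-type bytes written above follow RFC 1928: 0x01 = IPv4,
# 0x03 = length-prefixed domain name, 0x04 = IPv6, followed by the port as a
# 2-byte big-endian integer. A standalone sketch of that encoding, for
# illustration only (encode_socks5_address is a hypothetical helper; the
# method above additionally honours the proxy's rdns setting):
import io
import socket
import struct


def encode_socks5_address(host, port):
    buf = io.BytesIO()
    if ":" in host:
        buf.write(b"\x04" + socket.inet_pton(socket.AF_INET6, host))
    else:
        try:
            # Dotted-quad IPv4 literal.
            buf.write(b"\x01" + socket.inet_pton(socket.AF_INET, host))
        except socket.error:
            # Not an IP literal: send the domain name, length-prefixed.
            host_bytes = host.encode('idna')
            buf.write(b"\x03" + bytes([len(host_bytes)]) + host_bytes)
    buf.write(struct.pack(">H", port))
    return buf.getvalue()

# encode_socks5_address("www.google.com", 443)
#   -> b'\x03\x0ewww.google.com\x01\xbb'
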
def dig_ip_worker(self):
    try:
        for ip in self.dig_ips:
            if ip in self.dig_ipdict or ip in self.dig_finishedip:
                continue
            self.dig_ipdict.append(ip)

            if not check_ip_valid(ip):
                print('ip: %s is invalid, reset to default ip: %s' % (ip, default_ip))
                ip = default_ip
            print('\ndig ip: %s' % ip)

            cmd = ['1', '+subnet=%s/32' % ip, '@ns1.google.com', 'www.google.com']
            code = pydig(cmd)

            self.dig_lock.acquire()
            if code == 502:
                open(dig_error, "a").write(ip + "\n")
            else:
                open(dig_finished, "a").write(ip + "\n")
            self.dig_lock.release()
    except:
        pass
    finally:
        self.dig_lock.acquire()
        print('dig_ip_worker exit')
        self.dig_thread_num -= 1
        self.dig_lock.release()

def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM,
             proto=0, _sock=None):
    if type not in {socket.SOCK_STREAM, socket.SOCK_DGRAM}:
        msg = "Socket type must be stream or datagram, not {!r}"
        raise ValueError(msg.format(type))

    self._proxyconn = None  # TCP connection to keep UDP relay alive

    if self.default_proxy:
        self.proxy = self.default_proxy
        proxy_host = self.proxy[1]
        if ":" in proxy_host:
            family = socket.AF_INET6
        elif check_ip_valid(proxy_host):
            family = socket.AF_INET
    else:
        self.proxy = (None, None, None, None, None, None)

    _BaseSocket.__init__(self, family, type, proto, _sock)

    self.proxy_sockname = None
    self.proxy_peername = None

def req_importip_handler(self):
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''

    if reqs['cmd'] == ['importip']:
        count = 0
        ip_list = self.postvars['ipList'][0]
        addresses = ip_list.split('|')
        for ip in addresses:
            if not ip_utils.check_ip_valid(ip):
                continue
            if google_ip.add_ip(ip, 100, "google.com", "gws"):
                count += 1
        data = '{"res":"%s"}' % count
        google_ip.save_ip_list(force=True)
    elif reqs['cmd'] == ['exportip']:
        data = '{"res":"'
        for ip in google_ip.gws_ip_list:
            if google_ip.ip_dict[ip]['fail_times'] > 0:
                continue
            data += "%s|" % ip
        data = data[0:len(data) - 1]
        data += '"}'

    self.send_response_nc('text/html', data)

def report_bad_ip(self, ip_str):
    logging.debug("report_bad_ip %s", ip_str)
    if not ip_utils.check_ip_valid(ip_str):
        return

    self.bad_ip_pool.add(ip_str)
    self.save_ip_list(force=True)

def dig_ip(ip):
    if not check_ip_valid(ip):
        print('ip: %s is invalid, reset to default ip: %s' % (ip, default_ip))
        ip = default_ip
    print('\ndig ip: %s' % ip)

    cmd = ['', '+subnet=%s/32' % ip, '@ns1.google.com', 'www.google.com']
    pydig(cmd)

def direct_connect(self, host, port):
    connect_timeout = 30

    if ':' in host:
        info = [(socket.AF_INET6, socket.SOCK_STREAM, 0, "", (host, port, 0, 0))]
    elif ip_utils.check_ip_valid(host):
        info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))]
    else:
        try:
            info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
        except socket.gaierror:
            info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))]

    for res in info:
        af, socktype, proto, canonname, sa = res
        s = None
        try:
            s = socket.socket(af, socktype, proto)
            # See http://groups.google.com/group/cherrypy-users/
            #     browse_frm/thread/bbfe5eb39c904fe0
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
            s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
            s.settimeout(connect_timeout)
            s.connect((host, port))
            return s
        except socket.error:
            if s:
                s.close()

    return None

def report_bad_ip(self, ip_str):
    xlog.debug("report_bad_ip %s", ip_str)
    if not ip_utils.check_ip_valid(ip_str):
        return

    self.bad_ip_pool.add(ip_str)
    self.save_ip_list(force=True)

def req_importip_handler(self):
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ""

    if reqs["cmd"] == ["importip"]:
        count = 0
        ip_list = self.postvars["ipList"][0]
        addresses = ip_list.split("|")
        for ip in addresses:
            if not ip_utils.check_ip_valid(ip):
                continue
            if google_ip.add_ip(ip, 100, "google.com", "gws"):
                count += 1
        data = '{"res":"%s"}' % count
        google_ip.save_ip_list(force=True)
    elif reqs["cmd"] == ["exportip"]:
        data = '{"res":"'
        for ip in google_ip.gws_ip_list:
            data += "%s|" % ip
        data = data[0:len(data) - 1]
        data += '"}'

    self.send_response("text/html", data)

def req_importip_handler(self):
    req = urlparse.urlparse(self.path).query
    reqs = urlparse.parse_qs(req, keep_blank_values=True)
    data = ''

    if reqs['cmd'] == ['importip']:
        count = 0
        ip_list = self.postvars['ipList'][0]
        addresses = ip_list.split('|')
        for ip in addresses:
            if not ip_utils.check_ip_valid(ip):
                continue
            if google_ip.add_ip(ip, 100, "google.com", "gws"):
                count += 1
        data = '{"res":"%s"}' % count
        google_ip.save_ip_list(force=True)
    elif reqs['cmd'] == ['exportip']:
        data = '{"res":"'
        for ip in google_ip.gws_ip_list:
            if google_ip.ip_dict[ip]['fail_times'] > 0:
                continue
            data += "%s|" % ip
        data = data[0:len(data) - 1]
        data += '"}'

    self.send_response('text/html', data)

def dig_ip(ip):
    if not check_ip_valid(ip):
        print('ip: %s is invalid, reset to default ip: %s' % (ip, default_ip))
        ip = default_ip
    print('\ndig ip: %s' % ip)

    cmd = ['1', '+subnet=%s/32' % ip, '@ns1.google.com', 'www.google.com']
    code = pydig(cmd)
    if code == 502:
        open(dig_error, "a").write(ip + "\n")
    else:
        open(dig_finished, "a").write(ip + "\n")

def get_cert(commonname, sans=None, full_name=False):
    isip = check_ip_valid(commonname)

    with CertUtil.ca_lock:
        certfile = CertUtil._get_old_cert(commonname)
        if certfile:
            return certfile

        # Some sites need a full-name cert,
        # e.g. https://about.twitter.com in Google Chrome.
        if not isip and not full_name and commonname.count('.') >= 2 and \
                [len(x) for x in reversed(commonname.split('.'))] > [2, 4]:
            commonname = commonname.partition('.')[-1]
            certfile = CertUtil._get_old_cert(commonname)
            if certfile:
                return certfile

        return CertUtil._get_cert(commonname, isip, sans)

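# The list comparison in get_cert is a heuristic, shown here on its own
# (an illustrative reading inferred from the code above, not part of CertUtil):
# label lengths are compared last-label-first against [2, 4], so a host such as
# "about.twitter.com" is collapsed to its parent "twitter.com", while a name on
# a short country-code second-level domain such as "example.co.uk" is kept as-is.
for name in ("about.twitter.com", "example.co.uk"):
    lengths = [len(x) for x in reversed(name.split('.'))]
    print(name, lengths > [2, 4])  # True -> parent-domain cert would be reused
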
def load_ip(self):
    if os.path.isfile(good_ip_file):
        file_path = good_ip_file
    else:
        file_path = default_good_ip_file

    with open(file_path, "r") as fd:
        lines = fd.readlines()

    for line in lines:
        try:
            str_l = line.split(' ')
            if len(str_l) != 4:
                logging.warning("line err: %s", line)
                continue

            ip_str = str_l[0]
            domain = str_l[1]
            server = str_l[2]
            handshake_time = int(str_l[3])

            #logging.info("load ip: %s time:%d domain:%s server:%s", ip_str, handshake_time, domain, server)
            self.add_ip(ip_str, handshake_time, domain, server)
        except Exception as e:
            logging.exception("load_ip line:%s err:%s", line, e)

    logging.info("load google ip_list num:%d, gws num:%d", len(self.ip_dict), len(self.gws_ip_list))
    self.try_sort_ip_by_handshake_time(force=True)

    if os.path.isfile(bad_ip_file):
        with open(bad_ip_file, "r") as fd:
            for line in fd.readlines():
                try:
                    if line == "\n":
                        continue

                    str_l = line.replace('\n', '')
                    if not ip_utils.check_ip_valid(str_l):
                        logging.warning("bad_ip line err: %s", line)
                        continue

                    ip = str_l  # the stripped line is the bare IP
                    self.bad_ip_pool.add(ip)
                except Exception as e:
                    logging.exception("parse bad_ip.txt err:%r", e)

    if False:
        p = threading.Thread(target=self.check_exist_ip)
        p.daemon = True
        p.start()

def load_ip_blacklist(self):
    xlog.info("load user ip blacklist file:%s", self.user_ip_blacklist_file)
    with open(self.user_ip_blacklist_file, "r") as fd:
        # Strip the trailing newline from each line before validating it.
        self.ip_blacklist = [ip.strip() for ip in fd.readlines()
                             if ip_utils.check_ip_valid(ip.strip())]

    except Exception as e:
        xlog.exception("check fail:%r", e)
        return False
    else:
        return check_xtunnel_http2(ssl_sock, host)


if __name__ == "__main__":
    # case 1: only ip
    # case 2: ip + domain
    #         connect use domain, print altNames
    top_domain = None
    if len(sys.argv) > 1:
        ip = sys.argv[1]
        if not ip_utils.check_ip_valid(ip):
            ip = "104.28.100.89"
            top_domain = sys.argv[1]
            xlog.info("test domain:%s", top_domain)
    else:
        ip = "104.28.100.89"
        print("Usage: check_ip.py [ip] [top_domain] [wait_time=0]")

    xlog.info("test ip:%s", ip)

    if len(sys.argv) > 2:
        top_domain = sys.argv[2]
        xlog.info("test top domain:%s", top_domain)

    res = test_xtunnel_ip2(ip, top_domain=top_domain)
    if not res:

def merge_ip_range():
    ip_range_list = []

    ip_lines_list = re.split("\r|\n", ip_str_list)
    for iplines in ip_lines_list:
        if len(iplines) == 0 or iplines[0] == '#':
            #print "non:", iplines
            continue

        ips = re.split(r",|\|", iplines)
        for line in ips:
            if len(line) == 0 or line[0] == '#':
                #print "non line:", line
                continue

            begin, end = ip_utils.split_ip(line)
            if ip_utils.check_ip_valid(begin) == 0 or ip_utils.check_ip_valid(end) == 0:
                PRINT("ip format is error,line:%s, begin: %s,end: %s" % (line, begin, end))
                continue

            nbegin = ip_utils.ip_string_to_num(begin)
            nend = ip_utils.ip_string_to_num(end)
            ip_range_list.append([nbegin, nend])
            #print begin, end

    ip_range_list.sort()

    # merge range
    ip_range_list_2 = []
    range_num = len(ip_range_list)

    last_begin = ip_range_list[0][0]
    last_end = ip_range_list[0][1]
    for i in range(1, range_num):  # include the last range as well
        ip_range = ip_range_list[i]
        begin = ip_range[0]
        end = ip_range[1]
        #print "now:", ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)

        if begin > last_end + 2:
            #print "add:", ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
            ip_range_list_2.append([last_begin, last_end])
            last_begin = begin
            last_end = end
        else:
            print("merge: %s %s %s %s" % (
                ip_utils.ip_num_to_string(last_begin), ip_utils.ip_num_to_string(last_end),
                ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)))
            if end > last_end:
                last_end = end
    ip_range_list_2.append([last_begin, last_end])

    for ip_range in ip_range_list_2:
        begin = ip_range[0]
        end = ip_range[1]
        print("%s %s" % (ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)))

    # write out
    fd = open("ip_range.txt", "w")
    for ip_range in ip_range_list_2:
        begin = ip_range[0]
        end = ip_range[1]
        #print ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
        fd.write(ip_utils.ip_num_to_string(begin) + "-" + ip_utils.ip_num_to_string(end) + "\n")
    fd.close()

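# Example of the input format parse_range_string and merge_ip_range expect
# (hypothetical data; "," or "|" separates entries, each entry is a
# "begin-end" range, and parse_range_string also strips trailing "#" comments):
#
#   ip_str_list = """
#   # the two adjacent ranges below get merged into one span
#   1.9.22.0-1.9.22.255|1.9.23.0-1.9.23.255
#   8.8.8.0-8.8.8.255
#   """
#
# The parsed ranges are sorted by numeric start address, and any range that
# begins within 2 addresses of the previous range's end is folded into it
# before the result is written to ip_range.txt as "begin-end" lines.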