Code example #1
def start_connect_register(high_prior=False):
    global high_prior_connecting_num, low_prior_connecting_num, last_connect_time
    if not connect_interval:
        return

    ccc_lock.acquire()
    try:
        if high_prior_connecting_num + low_prior_connecting_num > config.https_max_connect_thread:
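            # Too many threads are connecting: park this thread on a private
            # lock and block until another thread releases it.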
            atom_lock = threading.Lock()
            atom_lock.acquire()
            if high_prior:
                high_prior_lock.append(atom_lock)
            else:
                low_prior_lock.append(atom_lock)
            ccc_lock.release()
            atom_lock.acquire()

            ccc_lock.acquire()

        last_connect_interval = time.time() - last_connect_time
        if last_connect_interval < 0:
            xlog.error("last_connect_interval:%f", last_connect_interval)
            return

        if last_connect_interval < connect_interval/1000.0:
            wait_time = connect_interval/1000.0 - last_connect_interval
            time.sleep(wait_time)

        if high_prior:
            high_prior_connecting_num += 1
        else:
            low_prior_connecting_num += 1
    finally:
        last_connect_time = time.time()
        ccc_lock.release()
Code example #2
File: google_ip.py Project: az0ne/XX-Net
    def add_ip(self, ip_str, handshake_time, domain=None, server=None):
        if not isinstance(ip_str, basestring):
            xlog.error("add_ip input")

        handshake_time = int(handshake_time)

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]['handshake_time'] = handshake_time
                self.ip_dict[ip_str]['timeout'] = 0
                self.ip_dict[ip_str]['history'].append([time.time(), handshake_time])
                return False

            self.iplist_need_save = 1

            self.ip_dict[ip_str] = {'handshake_time':handshake_time, 'domain':domain, 'server':server,
                                    'timeout':0, "history":[[time.time(), handshake_time]], "fail_time":0,
                                    "get_time":0}

            if 'gws' in server:
                self.gws_ip_list.append(ip_str)
            return True
        except Exception as e:
            xlog.error("set_ip err:%s", e)
        finally:
            self.ip_lock.release()
        return False
Code example #3
    def update_ip(self, ip_str, handshake_time):
        if not isinstance(ip_str, basestring):
            xlog.error("set_ip input")

        handshake_time = int(handshake_time)
        if handshake_time < 5:  # that's impossible
            return

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:

                # Case: a good ip's average handshake time is 300ms, but
                # occasional packet loss can push a handshake to 2000ms.
                # That ip would not move back to the front of the good-ip
                # list until all the others turn bad, so prevent the
                # handshake time from increasing too quickly.
                org_time = self.ip_dict[ip_str]['handshake_time']
                if handshake_time - org_time > 500:
                    self.ip_dict[ip_str]['handshake_time'] = org_time + 500
                else:
                    self.ip_dict[ip_str]['handshake_time'] = handshake_time

                self.ip_dict[ip_str]['timeout'] = 0
                self.ip_dict[ip_str]['history'].append(
                    [time.time(), handshake_time])
                self.ip_dict[ip_str]["fail_time"] = 0
                self.iplist_need_save = 1

            #logging.debug("update ip:%s not exist", ip_str)
        except Exception as e:
            xlog.error("update_ip err:%s", e)
        finally:
            self.ip_lock.release()

        self.save_ip_list()
Code example #4
File: google_ip.py Project: xiaot1015/XX-Net
    def get_gws_ip(self):
        self.try_sort_gws_ip()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.gws_ip_list)
            if ip_num == 0:
                #logging.warning("no gws ip")
                time.sleep(10)
                return None

            for i in range(ip_num):
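                # Round-robin over the sorted gws ip list; the pointer wraps
                # when it runs past the end and restarts from the front after
                # 3 seconds.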

                if self.gws_ip_pointer >= ip_num:
                    if time.time() - self.gws_ip_pointer_reset_time < 1:
                        time.sleep(1)
                        continue
                    else:
                        self.gws_ip_pointer = 0
                        self.gws_ip_pointer_reset_time = time.time()
                elif self.gws_ip_pointer > 0 and time.time() - self.gws_ip_pointer_reset_time > 3:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time.time()

                ip_str = self.gws_ip_list[self.gws_ip_pointer]
                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < self.ip_connect_interval:
                    self.gws_ip_pointer += 1
                    continue

                if time.time() - self.ip_dict[ip_str]['success_time'] > 300:  # 5 min
                    fail_connect_interval = 1800  # 30 min
                else:
                    fail_connect_interval = 120  # 2 min
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < fail_connect_interval:
                    self.gws_ip_pointer += 1
                    continue

                time_now = time.time()
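                # Skip ips that have used up their traffic quota; the counted
                # bytes decay at config.ip_traffic_quota per second since the
                # last data activity.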
                active_time = self.ip_dict[ip_str]['data_active']
                transfered_data = self.ip_dict[ip_str]['transfered_data'] - (
                    (time_now - active_time) * config.ip_traffic_quota)
                if transfered_data > config.ip_traffic_quota_base:
                    self.gws_ip_pointer += 1
                    continue

                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                self.gws_ip_pointer += 1
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #5
File: web_control.py Project: hzg0102/XX-Net
    def req_log_handler(self):
        req = urlparse.urlparse(self.path).query
        reqs = urlparse.parse_qs(req, keep_blank_values=True)
        data = ''

        cmd = "get_last"
        if reqs["cmd"]:
            cmd = reqs["cmd"][0]
        if cmd == "set_buffer_size" :
            if not reqs["buffer_size"]:
                data = '{"res":"fail", "reason":"size not set"}'
                mimetype = 'text/plain'
                self.send_response(mimetype, data)
                return

            buffer_size = reqs["buffer_size"][0]
            xlog.set_buffer_size(buffer_size)
        elif cmd == "get_last":
            max_line = int(reqs["max_line"][0])
            data = xlog.get_last_lines(max_line)
        elif cmd == "get_new":
            last_no = int(reqs["last_no"][0])
            data = xlog.get_new_lines(last_no)
        else:
            xlog.error('PAC %s %s %s ', self.address_string(), self.command, self.path)

        mimetype = 'text/plain'
        self.send_response(mimetype, data)
Code example #6
    def get_host_ip(self, host):
        self.try_sort_ip_by_handshake_time()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.ip_dict)
            if ip_num == 0:
                #logging.warning("no gws ip")
                time.sleep(1)
                return None

            for ip_str in self.ip_dict:
                domain = self.ip_dict[ip_str]["domain"]
                if domain != host:
                    continue

                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < 10:
                    continue
                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < 300:
                    continue

                xlog.debug("get host:%s ip:%s t:%d", host, ip_str,
                           handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #7
File: google_ip.py Project: az0ne/XX-Net
    def update_ip(self, ip_str, handshake_time):
        if not isinstance(ip_str, basestring):
            xlog.error("set_ip input")

        handshake_time = int(handshake_time)
        if handshake_time < 5: # this is impossible
            return

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:

                # Case: a good ip's average handshake time is 300ms, but
                # occasional packet loss can push a handshake to 2000ms.
                # That ip would not move back to the front of the good-ip
                # list until all the others turn bad, so prevent the
                # handshake time from increasing too quickly.
                org_time = self.ip_dict[ip_str]['handshake_time']
                if handshake_time - org_time > 500:
                    self.ip_dict[ip_str]['handshake_time'] = org_time + 500
                else:
                    self.ip_dict[ip_str]['handshake_time'] = handshake_time

                self.ip_dict[ip_str]['timeout'] = 0
                self.ip_dict[ip_str]['history'].append([time.time(), handshake_time])
                self.ip_dict[ip_str]["fail_time"] = 0
                self.iplist_need_save = 1
                return

            #logging.debug("update ip:%s not exist", ip_str)
        except Exception as e:
            xlog.error("update_ip err:%s", e)
        finally:
            self.ip_lock.release()

        self.save_ip_list()
Code example #8
File: google_ip.py Project: az0ne/XX-Net
    def save_ip_list(self, force=False):
        if not force:
            if self.iplist_need_save == 0:
                return
            if time.time() - self.iplist_saved_time < 10:
                return

        self.iplist_saved_time = time.time()

        try:
            self.ip_lock.acquire()
            ip_dict = sorted(self.ip_dict.items(),  key=lambda x: x[1]['handshake_time'])
            with open(self.good_ip_file, "w") as fd:
                for ip_str, property in ip_dict:
                    fd.write( "%s %s %s %d\n" % (ip_str, property['domain'], property['server'], property['handshake_time']) )

            with open(self.bad_ip_file, "w") as fd:
                for ip in self.bad_ip_pool:
                    xlog.debug("save bad ip:%s", ip)
                    fd.write("%s\n" % (ip))

            self.iplist_need_save = 0
        except Exception as e:
            xlog.error("save good_ip.txt fail %s", e)
        finally:
            self.ip_lock.release()
Code example #9
File: google_ip.py Project: az0ne/XX-Net
    def try_sort_ip_by_handshake_time(self, force=False):
        if time.time() - self.last_sort_time_for_gws < 10 and not force:
            return
        self.last_sort_time_for_gws = time.time()

        self.ip_lock.acquire()
        try:
            ip_dict_handshake_time = {}
            for ip_str in self.ip_dict:
                if 'gws' not in self.ip_dict[ip_str]['server']:
                    continue
                ip_dict_handshake_time[ip_str] = self.ip_dict[ip_str]['handshake_time']

            ip_time = sorted(ip_dict_handshake_time.items(), key=operator.itemgetter(1))
            self.gws_ip_list = [ip_str for ip_str,handshake_time in ip_time]

        except Exception as e:
            xlog.error("try_sort_ip_by_handshake_time:%s", e)
        finally:
            self.ip_lock.release()

        time_cost = (( time.time() - self.last_sort_time_for_gws) * 1000)
        if time_cost > 30:
            xlog.debug("sort ip time:%dms", time_cost) # 5ms for 1000 ip. 70~150ms for 30000 ip.

        self.adjust_scan_thread_num()
Code example #10
    def try_sort_ip_by_handshake_time(self, force=False):
        if time.time() - self.last_sort_time_for_gws < 10 and not force:
            return
        self.last_sort_time_for_gws = time.time()

        self.ip_lock.acquire()
        try:
            ip_dict_handshake_time = {}
            for ip_str in self.ip_dict:
                if 'gws' not in self.ip_dict[ip_str]['server']:
                    continue
                ip_dict_handshake_time[ip_str] = self.ip_dict[ip_str][
                    'handshake_time']

            ip_time = sorted(ip_dict_handshake_time.items(),
                             key=operator.itemgetter(1))
            self.gws_ip_list = [ip_str for ip_str, handshake_time in ip_time]

        except Exception as e:
            xlog.error("try_sort_ip_by_handshake_time:%s", e)
        finally:
            self.ip_lock.release()

        time_cost = ((time.time() - self.last_sort_time_for_gws) * 1000)
        if time_cost > 30:
            xlog.debug("sort ip time:%dms",
                       time_cost)  # 5ms for 1000 ip. 70~150ms for 30000 ip.

        self.adjust_scan_thread_num()
Code example #11
File: google_ip.py Project: sun3596209/XX-Net
    def try_sort_gws_ip(self, force=False):
        if time.time() - self.last_sort_time_for_gws < 10 and not force:
            return

        self.ip_lock.acquire()
        self.last_sort_time_for_gws = time.time()
        try:
            self.good_ip_num = 0
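            # Rank each gws ip by handshake time plus a 1000ms penalty per
            # recorded failure; the sorted result becomes the new gws_ip_list.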
            ip_rate = {}
            for ip_str in self.ip_dict:
                if "gws" not in self.ip_dict[ip_str]["server"]:
                    continue
                ip_rate[ip_str] = self.ip_dict[ip_str]["handshake_time"] + (self.ip_dict[ip_str]["fail_times"] * 1000)
                if self.ip_dict[ip_str]["fail_times"] == 0:
                    self.good_ip_num += 1

            ip_time = sorted(ip_rate.items(), key=operator.itemgetter(1))
            self.gws_ip_list = [ip_str for ip_str, rate in ip_time]

        except Exception as e:
            xlog.error("try_sort_ip_by_handshake_time:%s", e)
        finally:
            self.ip_lock.release()

        time_cost = (time.time() - self.last_sort_time_for_gws) * 1000
        if time_cost > 30:
            xlog.debug("sort ip time:%dms", time_cost)  # 5ms for 1000 ip. 70~150ms for 30000 ip.

        self.adjust_scan_thread_num()
Code example #12
    def save_ip_list(self, force=False):
        if not force:
            if self.iplist_need_save == 0:
                return
            if time.time() - self.iplist_saved_time < 10:
                return

        self.iplist_saved_time = time.time()

        try:
            self.ip_lock.acquire()
            ip_dict = sorted(self.ip_dict.items(),
                             key=lambda x: x[1]['handshake_time'])
            with open(self.good_ip_file, "w") as fd:
                for ip_str, property in ip_dict:
                    fd.write("%s %s %s %d\n" %
                             (ip_str, property['domain'], property['server'],
                              property['handshake_time']))

            with open(self.bad_ip_file, "w") as fd:
                for ip in self.bad_ip_pool:
                    xlog.debug("save bad ip:%s", ip)
                    fd.write("%s\n" % (ip))

            self.iplist_need_save = 0
        except Exception as e:
            xlog.error("save good_ip.txt fail %s", e)
        finally:
            self.ip_lock.release()
Code example #13
File: google_ip.py Project: sun3596209/XX-Net
    def save_ip_list(self, force=False):
        if not force:
            if self.iplist_need_save == 0:
                return
            if time.time() - self.iplist_saved_time < 10:
                return

        self.iplist_saved_time = time.time()

        try:
            self.ip_lock.acquire()
            ip_dict = sorted(self.ip_dict.items(), key=lambda x: (x[1]["handshake_time"] + x[1]["fail_times"] * 1000))
            with open(self.good_ip_file, "w") as fd:
                for ip_str, property in ip_dict:
                    fd.write(
                        "%s %s %s %d %d\n"
                        % (
                            ip_str,
                            property["domain"],
                            property["server"],
                            property["handshake_time"],
                            property["fail_times"],
                        )
                    )

            self.iplist_need_save = 0
        except Exception as e:
            xlog.error("save good_ip.txt fail %s", e)
        finally:
            self.ip_lock.release()
Code example #14
    def req_log_handler(self):
        req = urlparse.urlparse(self.path).query
        reqs = urlparse.parse_qs(req, keep_blank_values=True)
        data = ''

        cmd = "get_last"
        if reqs["cmd"]:
            cmd = reqs["cmd"][0]
        if cmd == "set_buffer_size":
            if not reqs["buffer_size"]:
                data = '{"res":"fail", "reason":"size not set"}'
                mimetype = 'text/plain'
                self.send_response(mimetype, data)
                return

            buffer_size = reqs["buffer_size"][0]
            xlog.set_buffer_size(buffer_size)
        elif cmd == "get_last":
            max_line = int(reqs["max_line"][0])
            data = xlog.get_last_lines(max_line)
        elif cmd == "get_new":
            last_no = int(reqs["last_no"][0])
            data = xlog.get_new_lines(last_no)
        else:
            xlog.error('PAC %s %s %s ', self.address_string(), self.command,
                       self.path)

        mimetype = 'text/plain'
        self.send_response(mimetype, data)
Code example #15
File: google_ip.py Project: az0ne/XX-Net
    def get_host_ip(self, host):
        self.try_sort_ip_by_handshake_time()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.ip_dict)
            if ip_num == 0:
                #logging.warning("no gws ip")
                time.sleep(1)
                return None

            for ip_str in self.ip_dict:
                domain = self.ip_dict[ip_str]["domain"]
                if domain != host:
                    continue

                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < 10:
                    continue
                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < 300:
                    continue

                xlog.debug("get host:%s ip:%s t:%d", host, ip_str, handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #16
File: appids_manager.py Project: zctea/XX-Net
    def get_appid(self):
        if len(self.working_appid_list) == 0:
            xlog.error(
                "No usable appid left, add new appid to continue use GAEProxy")
            return None
        else:
            return random.choice(self.working_appid_list)
Code example #17
    def add_ip(self, ip_str, handshake_time, domain=None, server=None):
        if not isinstance(ip_str, basestring):
            xlog.error("add_ip input")

        handshake_time = int(handshake_time)

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]['handshake_time'] = handshake_time
                self.ip_dict[ip_str]['timeout'] = 0
                self.ip_dict[ip_str]['history'].append(
                    [time.time(), handshake_time])
                return False

            self.iplist_need_save = 1

            self.ip_dict[ip_str] = {
                'handshake_time': handshake_time,
                'domain': domain,
                'server': server,
                'timeout': 0,
                "history": [[time.time(), handshake_time]],
                "fail_time": 0,
                "get_time": 0
            }

            if 'gws' in server:
                self.gws_ip_list.append(ip_str)
            return True
        except Exception as e:
            xlog.error("set_ip err:%s", e)
        finally:
            self.ip_lock.release()
        return False
Code example #18
File: google_ip.py Project: neteasy-work/XX-Net
    def get_gws_ip(self):
        self.try_sort_gws_ip()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.gws_ip_list)
            if ip_num == 0:
                #logging.warning("no gws ip")
                time.sleep(10)
                return None

            for i in range(ip_num):

                if self.gws_ip_pointer >= ip_num:
                    if time.time() - self.gws_ip_pointer_reset_time < 1:
                        time.sleep(1)
                        continue
                    else:
                        self.gws_ip_pointer = 0
                        self.gws_ip_pointer_reset_time = time.time()
                elif self.gws_ip_pointer > 0 and time.time() - self.gws_ip_pointer_reset_time > 3:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time.time()

                ip_str = self.gws_ip_list[self.gws_ip_pointer]
                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < self.ip_connect_interval:
                    self.gws_ip_pointer += 1
                    continue

                if time.time() - self.ip_dict[ip_str]['success_time'] > 300: # 5 min
                    fail_connect_interval = 1800 # 30 min
                else:
                    fail_connect_interval = 120 # 2 min
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < fail_connect_interval:
                    self.gws_ip_pointer += 1
                    continue

                time_now = time.time()
                active_time = self.ip_dict[ip_str]['data_active']
                transfered_data = self.ip_dict[ip_str]['transfered_data'] - ((time_now - active_time) * config.ip_traffic_quota)
                if transfered_data > config.ip_traffic_quota_base:
                    self.gws_ip_pointer += 1
                    continue

                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                self.gws_ip_pointer += 1
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #19
File: google_ip.py Project: sun3596209/XX-Net
    def report_connect_closed(self, ip_str, reason=""):
        xlog.debug("%s close:%s", ip_str, reason)
        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]["links"] -= 1
                self.append_ip_history(ip_str, "C[%s]" % reason)
        except Exception as e:
            xlog.error("report_connect_closed err:%s", e)
        finally:
            self.ip_lock.release()
Code example #20
File: google_ip.py Project: zctea/XX-Net
    def report_connect_closed(self, ip_str, reason=""):
        xlog.debug("%s close:%s", ip_str, reason)
        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]['links'] -= 1
                self.append_ip_history(ip_str, "C[%s]" % reason)
        except Exception as e:
            xlog.error("report_connect_closed err:%s", e)
        finally:
            self.ip_lock.release()
Code example #21
    def head_request(self, ssl_sock):
        if ssl_sock.host == '':
            ssl_sock.appid = appid_manager.get_appid()
            if not ssl_sock.appid:
                xlog.error("no appid can use")
                return False
            host = ssl_sock.appid + ".appspot.com"
            ssl_sock.host = host
        else:
            host = ssl_sock.host

        # public appid don't keep alive, for quota limit.
        if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
            #logging.info("public appid don't keep alive")
            #self.keep_alive = 0
            return False

        #logging.debug("head request %s", host)

        request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

        response = None
        try:
            ssl_sock.settimeout(10)
            ssl_sock.sock.settimeout(10)

            data = request_data.encode()
            ret = ssl_sock.send(data)
            if ret != len(data):
                xlog.warn("head send len:%d %d", ret, len(data))
            response = httplib.HTTPResponse(ssl_sock, buffering=True)

            response.begin()

            status = response.status
            if status != 200:
                xlog.debug("app head fail status:%d", status)
                raise Exception("app check fail")
            return True
        except httplib.BadStatusLine as e:
            inactive_time = time.time() - ssl_sock.last_use_time
            xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip,
                       inactive_time)
            return False
        except Exception as e:
            xlog.debug("head request fail:%r", e)
            return False
        finally:
            if response:
                response.close()
Code example #22
File: google_ip.py Project: zctea/XX-Net
    def add_ip(self,
               ip_str,
               handshake_time,
               domain=None,
               server='',
               fail_times=0):
        if not isinstance(ip_str, basestring):
            xlog.error("add_ip input")
            return

        if config.USE_IPV6 and ":" not in ip_str:
            xlog.warn("add %s but ipv6", ip_str)
            return

        handshake_time = int(handshake_time)

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]['handshake_time'] = handshake_time
                self.ip_dict[ip_str]['fail_times'] = fail_times
                self.ip_dict[ip_str]['fail_time'] = 0
                self.append_ip_history(ip_str, handshake_time)
                return False

            self.iplist_need_save = 1
            self.good_ip_num += 1

            self.ip_dict[ip_str] = {
                'handshake_time': handshake_time,
                "fail_times": fail_times,
                "transfered_data": 0,
                'data_active': 0,
                'domain': domain,
                'server': server,
                "history": [[time.time(), handshake_time]],
                "fail_time": 0,
                "success_time": 0,
                "get_time": 0,
                "links": 0
            }

            if 'gws' in server:
                self.gws_ip_list.append(ip_str)
            return True
        except Exception as e:
            xlog.exception("add_ip err:%s", e)
        finally:
            self.ip_lock.release()
        return False
Code example #23
def load_sock():
    if config.PROXY_ENABLE:
        if config.PROXY_TYPE == "HTTP":
            proxy_type = socks.HTTP
        elif config.PROXY_TYPE == "SOCKS4":
            proxy_type = socks.SOCKS4
        elif config.PROXY_TYPE == "SOCKS5":
            proxy_type = socks.SOCKS5
        else:
            xlog.error("proxy type %s unknown, disable proxy", config.PROXY_TYPE)
            config.PROXY_ENABLE = 0

    if config.PROXY_ENABLE:
        socks.set_default_proxy(proxy_type, config.PROXY_HOST, config.PROXY_PORT, config.PROXY_USER, config.PROXY_PASSWD)
Code example #24
    def head_request(self, ssl_sock):
        if ssl_sock.host == '':
            ssl_sock.appid = appid_manager.get_appid()
            if not ssl_sock.appid:
                xlog.error("no appid can use")
                return False
            host = ssl_sock.appid + ".appspot.com"
            ssl_sock.host = host
        else:
            host = ssl_sock.host

        # public appid don't keep alive, for quota limit.
        if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
            #logging.info("public appid don't keep alive")
            #self.keep_alive = 0
            return False

        #logging.debug("head request %s", host)

        request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

        response = None
        try:
            ssl_sock.settimeout(10)
            ssl_sock.sock.settimeout(10)

            data = request_data.encode()
            ret = ssl_sock.send(data)
            if ret != len(data):
                xlog.warn("head send len:%d %d", ret, len(data))
            response = httplib.HTTPResponse(ssl_sock, buffering=True)

            response.begin()

            status = response.status
            if status != 200:
                xlog.debug("app head fail status:%d", status)
                raise Exception("app check fail")
            return True
        except httplib.BadStatusLine as e:
            inactive_time = time.time() - ssl_sock.last_use_time
            xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
            return False
        except Exception as e:
            xlog.debug("head request fail:%r", e)
            return False
        finally:
            if response:
                response.close()
Code example #25
File: google_ip_range.py Project: zctea/XX-Net
    def load_range_content(self):
        if os.path.isfile(user_range_file):
            self.range_file = user_range_file
        else:
            self.range_file = default_range_file

        xlog.info("load ip range file:%s", self.range_file)
        fd = open(self.range_file, "r")
        if not fd:
            xlog.error("load ip range %s fail", self.range_file)
            return

        content = fd.read()
        fd.close()
        return content
Code example #26
File: google_ip_range.py Project: sun3596209/XX-Net
    def load_range_content(self):
        if os.path.isfile(user_range_file):
            self.range_file = user_range_file
        else:
            self.range_file = default_range_file

        xlog.info("load ip range file:%s", self.range_file)
        fd = open(self.range_file, "r")
        if not fd:
            xlog.error("load ip range %s fail", self.range_file)
            return

        content = fd.read()
        fd.close()
        return content
Code example #27
    def get_gws_ip(self):
        self.try_sort_ip_by_handshake_time()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.gws_ip_list)
            for i in range(ip_num):
                if ip_num == 0:
                    #logging.warning("no gws ip")
                    time.sleep(1)
                    return None

                if self.gws_ip_pointer >= ip_num:
                    if time.time() - self.gws_ip_pointer_reset_time < 1:
                        time.sleep(1)
                        continue
                    else:
                        self.gws_ip_pointer = 0
                        self.gws_ip_pointer_reset_time = time.time()
                elif self.gws_ip_pointer > 0 and time.time() - self.gws_ip_pointer_reset_time > 3:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time.time()

                ip_str = self.gws_ip_list[self.gws_ip_pointer]
                if self.is_bad_ip(ip_str):
                    self.gws_ip_pointer += 1
                    continue
                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < self.ip_connect_interval:
                    self.gws_ip_pointer += 1
                    continue
                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < 300:
                    self.gws_ip_pointer += 1
                    continue

                xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                self.gws_ip_pointer += 1
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #28
File: check_ip.py Project: zctea/XX-Net
def load_proxy_config():
    global default_socket
    if config.PROXY_ENABLE:

        if config.PROXY_TYPE == "HTTP":
            proxy_type = socks.HTTP
        elif config.PROXY_TYPE == "SOCKS4":
            proxy_type = socks.SOCKS4
        elif config.PROXY_TYPE == "SOCKS5":
            proxy_type = socks.SOCKS5
        else:
            xlog.error("proxy type %s unknown, disable proxy", config.PROXY_TYPE)
            raise

        socks.set_default_proxy(proxy_type, config.PROXY_HOST, config.PROXY_PORT, config.PROXY_USER, config.PROXY_PASSWD)
        default_socket = socket.socket
Code example #29
File: google_ip.py Project: az0ne/XX-Net
    def get_gws_ip(self):
        self.try_sort_ip_by_handshake_time()

        self.ip_lock.acquire()
        try:
            ip_num = len(self.gws_ip_list)
            for i in range(ip_num):
                if ip_num == 0:
                    #logging.warning("no gws ip")
                    time.sleep(1)
                    return None

                if self.gws_ip_pointer >= ip_num:
                    if time.time() - self.gws_ip_pointer_reset_time < 1:
                        time.sleep(1)
                        continue
                    else:
                        self.gws_ip_pointer = 0
                        self.gws_ip_pointer_reset_time = time.time()
                elif self.gws_ip_pointer > 0 and time.time() - self.gws_ip_pointer_reset_time > 3:
                    self.gws_ip_pointer = 0
                    self.gws_ip_pointer_reset_time = time.time()

                ip_str = self.gws_ip_list[self.gws_ip_pointer]
                if self.is_bad_ip(ip_str):
                    self.gws_ip_pointer += 1
                    continue
                get_time = self.ip_dict[ip_str]["get_time"]
                if time.time() - get_time < self.ip_connect_interval:
                    self.gws_ip_pointer += 1
                    continue
                handshake_time = self.ip_dict[ip_str]["handshake_time"]
                fail_time = self.ip_dict[ip_str]["fail_time"]
                if time.time() - fail_time < 300:
                    self.gws_ip_pointer += 1
                    continue

                xlog.debug("get ip:%s t:%d", ip_str, handshake_time)
                self.ip_dict[ip_str]['history'].append([time.time(), "get"])
                self.ip_dict[ip_str]['get_time'] = time.time()
                self.gws_ip_pointer += 1
                return ip_str
        except Exception as e:
            xlog.error("get_gws_ip fail:%s", e)
            traceback.print_exc()
        finally:
            self.ip_lock.release()
Code example #30
File: google_ip.py Project: sun3596209/XX-Net
    def add_ip(self, ip_str, handshake_time, domain=None, server="", fail_times=0):
        if not isinstance(ip_str, basestring):
            xlog.error("add_ip input")
            return

        if config.USE_IPV6 and ":" not in ip_str:
            xlog.warn("add %s but ipv6", ip_str)
            return

        handshake_time = int(handshake_time)

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                self.ip_dict[ip_str]["handshake_time"] = handshake_time
                self.ip_dict[ip_str]["fail_times"] = fail_times
                self.ip_dict[ip_str]["fail_time"] = 0
                self.append_ip_history(ip_str, handshake_time)
                return False

            self.iplist_need_save = 1
            self.good_ip_num += 1

            self.ip_dict[ip_str] = {
                "handshake_time": handshake_time,
                "fail_times": fail_times,
                "transfered_data": 0,
                "data_active": 0,
                "domain": domain,
                "server": server,
                "history": [[time.time(), handshake_time]],
                "fail_time": 0,
                "success_time": 0,
                "get_time": 0,
                "links": 0,
            }

            if "gws" in server:
                self.gws_ip_list.append(ip_str)
            return True
        except Exception as e:
            xlog.exception("add_ip err:%s", e)
        finally:
            self.ip_lock.release()
        return False
Code example #31
File: check_ip.py Project: sun3596209/XX-Net
def load_proxy_config():
    global default_socket
    if config.PROXY_ENABLE:

        if config.PROXY_TYPE == "HTTP":
            proxy_type = socks.HTTP
        elif config.PROXY_TYPE == "SOCKS4":
            proxy_type = socks.SOCKS4
        elif config.PROXY_TYPE == "SOCKS5":
            proxy_type = socks.SOCKS5
        else:
            xlog.error("proxy type %s unknown, disable proxy", config.PROXY_TYPE)
            raise

        socks.set_default_proxy(
            proxy_type, config.PROXY_HOST, config.PROXY_PORT, config.PROXY_USER, config.PROXY_PASSWD
        )
        default_socket = socket.socket
Code example #32
File: xconfig.py Project: JustL1keThat/ladder
    def load(self):
        self.last_load_time = time.time()
        if os.path.isfile(self.config_path):
            with open(self.config_path, 'r') as f:
                content = f.read()
                content = content.strip()
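                # normalize the raw text (drop CR/LF and trailing commas) so
                # slightly malformed JSON still parses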
                content = content.replace("\r", "")
                content = content.replace("\n", "")
                content = content.replace(",}", "}")
                try:
                    self.file_config = json.loads(content)
                except Exception as e:
                    xlog.error("Loading config:%s fail:r", self.config_path, e)
                    return

        for var_name in self.default_config:
            if self.file_config and var_name in self.file_config:
                setattr(self, var_name, self.file_config[var_name])
            else:
                setattr(self, var_name, self.default_config[var_name])
Code example #33
File: google_ip.py Project: sun3596209/XX-Net
    def update_ip(self, ip_str, handshake_time):
        if not isinstance(ip_str, basestring):
            xlog.error("set_ip input")
            return

        handshake_time = int(handshake_time)
        if handshake_time < 5:  # that's impossible
            xlog.warn("%s handshake:%d impossible", ip_str, 1000 * handshake_time)
            return

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
                time_now = time.time()

                # Case: a good ip's average handshake time is 300ms, but
                # occasional packet loss can push a handshake to 2000ms.
                # That ip would not move back to the front of the good-ip
                # list until all the others turn bad, so prevent the
                # handshake time from increasing too quickly.
                org_time = self.ip_dict[ip_str]["handshake_time"]
                if handshake_time - org_time > 500:
                    self.ip_dict[ip_str]["handshake_time"] = org_time + 500
                else:
                    self.ip_dict[ip_str]["handshake_time"] = handshake_time

                self.ip_dict[ip_str]["success_time"] = time_now
                if self.ip_dict[ip_str]["fail_times"] > 0:
                    self.good_ip_num += 1
                self.ip_dict[ip_str]["fail_times"] = 0
                self.append_ip_history(ip_str, handshake_time)
                self.ip_dict[ip_str]["fail_time"] = 0

                self.iplist_need_save = 1

            # logging.debug("update ip:%s not exist", ip_str)
        except Exception as e:
            xlog.error("update_ip err:%s", e)
        finally:
            self.ip_lock.release()

        self.save_ip_list()
Code example #34
    def report_ip_traffic(self, ip_str, bytes):
        if bytes == 0:
            return

        self.ip_lock.acquire()
        try:
            if ip_str in self.ip_dict:
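                # leaky-bucket accounting: the stored byte count decays at
                # config.ip_traffic_quota bytes per second of inactivity
                # before the new bytes are added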
                time_now = time.time()

                active_time = self.ip_dict[ip_str]['data_active']
                transfered_data = self.ip_dict[ip_str]['transfered_data'] - ((time_now - active_time) * config.ip_traffic_quota)
                if transfered_data < 0:
                    transfered_data = 0

                transfered_data += bytes
                self.ip_dict[ip_str]['transfered_data'] = transfered_data
                self.ip_dict[ip_str]['data_active'] = time_now
                self.ip_dict[ip_str]['history'].append([time.time(), "%d_B" % bytes])
        except Exception as e:
            xlog.error("report_ip_trafic err:%s", e)
        finally:
            self.ip_lock.release()
Code example #35
File: proxy_handler.py Project: zhhwin/XX-Net
    def do_AGENT(self):
        def get_crlf(rfile):
            crlf = rfile.readline(2)
            if crlf != "\r\n":
                xlog.warn("chunk header read fail crlf")

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return
        elif 'Transfer-Encoding' in request_headers:
            # chunked, used by facebook android client
            payload = ""
            while True:
                chunk_size_str = self.rfile.readline(65537)
                chunk_size_list = chunk_size_str.split(";")
                chunk_size = int("0x" + chunk_size_list[0], 0)
                if len(chunk_size_list) > 1 and chunk_size_list[1] != "\r\n":
                    xlog.warn("chunk ext: %s", chunk_size_str)
                if chunk_size == 0:
                    while True:
                        line = self.rfile.readline(65537)
                        if line == "\r\n":
                            break
                        else:
                            xlog.warn("entity header:%s", line)
                    break
                payload += self.rfile.read(chunk_size)
                get_crlf(self.rfile)

        gae_handler.handler(self.command, self.path, request_headers, payload,
                            self.wfile)
Code example #36
File: proxy_handler.py Project: 23niu/XX-Net
    def do_AGENT(self):
        def get_crlf(rfile):
            crlf = rfile.readline(2)
            if crlf != "\r\n":
                xlog.warn("chunk header read fail crlf")

        request_headers = dict((k.title(), v) for k, v in self.headers.items())

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload_len = int(request_headers.get('Content-Length', 0))
                #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                payload = self.rfile.read(payload_len)
            except NetWorkIOError as e:
                xlog.error('handle_method_urlfetch read payload failed:%s', e)
                return
        elif 'Transfer-Encoding' in request_headers:
            # chunked, used by facebook android client
            payload = ""
            while True:
                chunk_size_str = self.rfile.readline(65537)
                chunk_size_list = chunk_size_str.split(";")
                chunk_size = int("0x"+chunk_size_list[0], 0)
                if len(chunk_size_list) > 1 and chunk_size_list[1] != "\r\n":
                    xlog.warn("chunk ext: %s", chunk_size_str)
                if chunk_size == 0:
                    while True:
                        line = self.rfile.readline(65537)
                        if line == "\r\n":
                            break
                        else:
                            xlog.warn("entity header:%s", line)
                    break
                payload += self.rfile.read(chunk_size)
                get_crlf(self.rfile)

        gae_handler.handler(self.command, self.path, request_headers, payload, self.wfile)
Code example #37
def run_imports():
    global imported, slow_xor, salsa20, numpy
    if not imported:
        imported = True
        try:
            numpy = __import__('numpy')
        except ImportError:
            xlog.error('can not import numpy, using SLOW XOR')
            xlog.error('please install numpy if you use salsa20')
            slow_xor = True
        try:
            salsa20 = __import__('salsa20')
        except ImportError:
            xlog.error('you have to install salsa20 before you use salsa20')
            sys.exit(1)
Code example #38
def run_imports():
    global imported, slow_xor, salsa20, numpy
    if not imported:
        imported = True
        try:
            numpy = __import__('numpy')
        except ImportError:
            xlog.error('can not import numpy, using SLOW XOR')
            xlog.error('please install numpy if you use salsa20')
            slow_xor = True
        try:
            salsa20 = __import__('salsa20')
        except ImportError:
            xlog.error('you have to install salsa20 before you use salsa20')
            sys.exit(1)
Code example #39
File: gae_handler.py Project: ttkx/XX-Net
    def fetch(self):
        response_headers = dict(
            (k.title(), v) for k, v in self.response.getheaders())
        content_range = response_headers['Content-Range']
        start, end, length = tuple(
            int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                      content_range).group(1, 2, 3))
        if start == 0:
            response_headers['Content-Length'] = str(length)
            del response_headers['Content-Range']
        else:
            response_headers['Content-Range'] = 'bytes %s-%s/%s' % (start, end,
                                                                    length)
            response_headers['Content-Length'] = str(length - start)

        xlog.info('>>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d', self.url,
                  start, end)

        try:
            self.wfile.write("HTTP/1.1 200 OK\r\n")
            for key in response_headers:
                if key == 'Transfer-Encoding':
                    continue
                if key == 'X-Head-Content-Length':
                    continue
                if key in skip_headers:
                    continue
                value = response_headers[key]
                #logging.debug("Head %s: %s", key.title(), value)
                send_header(self.wfile, key, value)
            self.wfile.write("\r\n")
        except Exception as e:
            self._stopped = True
            xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
            return
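
        # Split the remaining byte range into self.maxsize chunks, fetch them
        # in parallel worker threads, and use a priority queue keyed by offset
        # to write the chunks back to the client in order.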

        data_queue = Queue.PriorityQueue()
        range_queue = Queue.PriorityQueue()
        range_queue.put((start, end, self.response))
        self.expect_begin = start
        for begin in range(end + 1, length, self.maxsize):
            range_queue.put((begin, min(begin + self.maxsize - 1,
                                        length - 1), None))
        for i in xrange(0, self.threads):
            range_delay_size = i * self.maxsize
            spawn_later(
                float(range_delay_size) / self.waitsize, self.__fetchlet,
                range_queue, data_queue, range_delay_size)

        has_peek = hasattr(data_queue, 'peek')
        peek_timeout = 120
        while self.expect_begin < length - 1:
            try:
                if has_peek:
                    begin, data = data_queue.peek(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        data_queue.get()
                    elif self.expect_begin < begin:
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error(
                            'RangeFetch Error: begin(%r) < expect_begin(%r), quit.',
                            begin, self.expect_begin)
                        break
                else:
                    begin, data = data_queue.get(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        pass
                    elif self.expect_begin < begin:
                        data_queue.put((begin, data))
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error(
                            'RangeFetch Error: begin(%r) < expect_begin(%r), quit.',
                            begin, self.expect_begin)
                        break
            except Queue.Empty:
                xlog.error('data_queue peek timeout, break')
                break

            try:
                ret = self.wfile.write(data)
                if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                    xlog.debug("send to browser wfile.write ret:%d, retry",
                               ret)
                    ret = self.wfile.write(data)
                    xlog.debug("send to browser wfile.write ret:%d", ret)
                self.expect_begin += len(data)
                del data
            except Exception as e:
                xlog.warn('RangeFetch client closed(%s). %s', e, self.url)
                break
        self._stopped = True
Code example #40
def err(alg, key, iv, op, key_as_bytes=0, d=None, salt=None, i=1, padding=1):
    xlog.error(('M2Crypto is required to use %s, please run'
                ' `apt-get install python-m2crypto`') % alg)
    sys.exit(1)
Code example #41
File: appids_manager.py Project: sun3596209/XX-Net
    def get_appid(self):
        if len(self.working_appid_list) == 0:
            xlog.error("No usable appid left, add new appid to continue use GAEProxy")
            return None
        else:
            return random.choice(self.working_appid_list)
Code example #42
File: m2.py Project: 03013405yujiangfeng/XX-Net
def err(alg, key, iv, op, key_as_bytes=0, d=None, salt=None, i=1, padding=1):
    xlog.error(('M2Crypto is required to use %s, please run'
                   ' `apt-get install python-m2crypto`') % alg)
    sys.exit(1)
Code example #43
File: gae_handler.py Project: ttkx/XX-Net
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
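        # Worker loop: take a byte range from range_queue, fetch it, and push
        # the data into data_queue; failed ranges are re-queued for retry.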
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    if (self.expect_begin < start and
                            data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024):
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers,
                                         self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet",
                                 e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'],
                                 response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ',
                                 response.app_status, self.method, self.url,
                                 headers['Range'])

                    if response.app_status == 404:
                        xlog.warning('APPID %r not exists, remove it.',
                                     response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    if response.app_status == 503:
                        xlog.warning(
                            'APPID %r out of Quota, remove it temporary.',
                            response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(
                            response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if response.getheader('Location'):
                    self.url = urlparse.urljoin(self.url,
                                                response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning(
                            'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method, self.url, content_range,
                            response.getheaders(), start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(
                        response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s',
                              threading.currentThread().ident, content_length,
                              content_range)

                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s',
                                         self.method, self.url,
                                         headers['Range'], e)
                            break

                    if start < end + 1:
                        xlog.warning('RangeFetch "%s %s" retry %s-%s',
                                     self.method, self.url, start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    https_manager.save_ssl_connection_for_reuse(
                        response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.',
                              start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url,
                               response.status)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
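
The worker loop above (and its duplicates later on this page) follows a simple pattern: pull a byte range off a shared queue, download it, push the received chunks keyed by their offset, and put the range back on the queue whenever anything goes wrong so another thread can retry it. Below is a minimal sketch of that pattern with the GAE/appid handling stripped out; fetch_range() is a hypothetical downloader, and the Python 2 Queue module is assumed, as in the examples themselves.

import Queue   # Python 2 standard library, as in the examples; "queue" on Python 3

def range_worker(range_queue, data_queue, fetch_range):
    # each worker thread runs this loop until the range queue stays empty
    while True:
        try:
            start, end = range_queue.get(timeout=1)
        except Queue.Empty:
            return                              # nothing left to fetch
        try:
            data = fetch_range(start, end)      # hypothetical HTTP range downloader
            data_queue.put((start, data))       # keyed by offset for reassembly
        except Exception:
            range_queue.put((start, end))       # requeue so another worker can retry
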
Code example #44
File: proxy_handler.py Project: zhhwin/XX-Net
    def do_CONNECT_DIRECT(self):
        """deploy fake cert to client"""
        host, _, port = self.path.rpartition(':')
        port = int(port)
        if port != 443:
            xlog.warn("CONNECT %s port:%d not support", host, port)
            return

        certfile = CertUtil.get_cert(host)
        xlog.info('GAE %s %s:%d ', self.command, host, port)
        self.__realconnection = None
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

        try:
            ssl_sock = ssl.wrap_socket(self.connection,
                                       keyfile=certfile,
                                       certfile=certfile,
                                       server_side=True)
        except ssl.SSLError as e:
            xlog.info('ssl error: %s, create full domain cert for host:%s', e,
                      host)
            certfile = CertUtil.get_cert(host, full_name=True)
            return
        except Exception as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
                xlog.exception(
                    'ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s',
                    self.connection, e, self.path, e.args[0])
            return

        self.__realconnection = self.connection
        self.__realwfile = self.wfile
        self.__realrfile = self.rfile
        self.connection = ssl_sock
        self.rfile = self.connection.makefile('rb', self.bufsize)
        self.wfile = self.connection.makefile('wb', 0)

        try:
            self.raw_requestline = self.rfile.readline(65537)
            if len(self.raw_requestline) > 65536:
                self.requestline = ''
                self.request_version = ''
                self.command = ''
                self.send_error(414)
                return
            if not self.raw_requestline:
                self.close_connection = 1
                return
            if not self.parse_request():
                return
        except NetWorkIOError as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET,
                                 errno.EPIPE):
                raise
        if self.path[0] == '/' and host:
            self.path = 'https://%s%s' % (self.headers['Host'], self.path)
        xlog.debug('GAE CONNECT %s %s', self.command, self.path)

        try:
            if self.path[0] == '/' and host:
                self.path = 'http://%s%s' % (host, self.path)
            elif not host and '://' in self.path:
                host = urlparse.urlparse(self.path).netloc

            self.parsed_url = urlparse.urlparse(self.path)
            if len(self.parsed_url[4]):
                path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
            else:
                path = self.parsed_url[2]

            request_headers = dict(
                (k.title(), v) for k, v in self.headers.items())

            payload = b''
            if 'Content-Length' in request_headers:
                try:
                    payload_len = int(request_headers.get('Content-Length', 0))
                    #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                    payload = self.rfile.read(payload_len)
                except NetWorkIOError as e:
                    xlog.error('handle_method_urlfetch read payload failed:%s',
                               e)
                    return

            direct_handler.handler(self.command, host, path, request_headers,
                                   payload, self.wfile)

        except NetWorkIOError as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT,
                                 errno.EPIPE):
                raise
        finally:
            if self.__realconnection:
                try:
                    self.__realconnection.shutdown(socket.SHUT_WR)
                    self.__realconnection.close()
                except NetWorkIOError:
                    pass
                finally:
                    self.__realconnection = None
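
do_CONNECT_DIRECT above implements the usual local man-in-the-middle handling of a CONNECT request: acknowledge the tunnel, wrap the client socket in TLS using a locally generated certificate for the requested host, and then parse the decrypted stream as an ordinary HTTP request. A minimal sketch of just that handshake step follows; make_cert() is a hypothetical helper standing in for CertUtil.get_cert() and is assumed to return a PEM file path.

import ssl

def accept_connect(client_sock, host, make_cert):
    # tell the browser the tunnel is up so it starts its TLS handshake with us
    client_sock.sendall(b'HTTP/1.1 200 OK\r\n\r\n')
    certfile = make_cert(host)                  # hypothetical per-host cert generator
    ssl_sock = ssl.wrap_socket(client_sock,     # same call the examples use (Python 2)
                               keyfile=certfile,
                               certfile=certfile,
                               server_side=True)
    # the decrypted inner request can now be read like plain HTTP
    return ssl_sock.makefile('rb'), ssl_sock.makefile('wb')
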
Code example #45
File: connect_manager.py Project: hzg0102/XX-Net
    def create_connection(self, host="", port=443, sock_life=5):
        if port != 443:
            xlog.warn("forward port %d not supported.", port)
            return None

        def _create_connection(ip_port, delay=0):
            time.sleep(delay)
            ip = ip_port[0]
            sock = None
            # start connection time record
            start_time = time.time()
            conn_time = 0
            try:
                # create an IPv4/IPv6 socket object
                if config.PROXY_ENABLE:
                    sock = socks.socksocket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
                else:
                    sock = socket.socket(socket.AF_INET if ':' not in ip else socket.AF_INET6)
                # set reuseaddr option to avoid 10048 socket error
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                # resize the socket recv buffer from 8KB to 32KB to improve browser-related application performance
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32*1024)
                # disable Nagle's algorithm to send the HTTP request quickly.
                sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
                # set a short timeout to trigger timeout retry more quickly.
                sock.settimeout(self.timeout)

                # TCP connect
                sock.connect(ip_port)

                # record TCP connection time
                conn_time = time.time() - start_time
                xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)

                google_ip.update_ip(ip, conn_time * 2000)
                #logging.info("create_tcp update ip:%s time:%d", ip, conn_time * 2000)

                # cache the connected TCP socket for later reuse
                #sock.ip = ip
                self.tcp_connection_cache.put((time.time(), sock))
            except Exception as e:
                conn_time = int((time.time() - start_time) * 1000)
                xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
                google_ip.report_connect_fail(ip)
                #logging.info("create_tcp report fail ip:%s", ip)
                if sock:
                    sock.close()
            finally:
                self.thread_num_lock.acquire()
                self.thread_num -= 1
                self.thread_num_lock.release()


        if host != "appengine.google.com":
            while True:
                try:
                    ctime, sock = self.tcp_connection_cache.get_nowait()
                    if time.time() - ctime < sock_life:
                        return sock
                    else:
                        sock.close()
                        continue
                except Queue.Empty:
                    break

        start_time = time.time()
        while time.time() - start_time < self.max_timeout:

            if self.thread_num < self.max_thread_num:
                if host == "appengine.google.com":
                    ip = google_ip.get_host_ip("*.google.com")
                else:
                    ip = google_ip.get_gws_ip()
                if not ip:
                    xlog.error("no gws ip.")
                    return
                addr = (ip, port)
                self.thread_num_lock.acquire()
                self.thread_num += 1
                self.thread_num_lock.release()
                p = threading.Thread(target=_create_connection, args=(addr,))
                p.start()

            try:
                ctime, sock = self.tcp_connection_cache.get(timeout=0.2)
                return sock
            except:
                continue
        xlog.warning('create tcp connection fail.')
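
create_connection above combines a small socket cache with background connector threads: reuse a recently opened socket if one is still fresh, otherwise keep spawning connectors and wait briefly on the shared queue until one of them succeeds or the overall deadline passes. The sketch below captures only that cache-then-spawn flow; pick_ip() and open_socket() are hypothetical stand-ins for the google_ip helpers, and the real code's thread-count throttling (thread_num/max_thread_num) and IP scoring are omitted.

import time
import threading
import Queue   # Python 2 name; "queue" on Python 3

def get_connection(cache, pick_ip, open_socket, sock_life=5, max_wait=10):
    # reuse a cached socket if it is still fresh; close stale ones
    while True:
        try:
            created, sock = cache.get_nowait()
        except Queue.Empty:
            break
        if time.time() - created < sock_life:
            return sock
        sock.close()

    def connector():
        try:
            ip = pick_ip()                      # hypothetical IP selector
            if ip:
                cache.put((time.time(), open_socket(ip)))   # hypothetical dialer
        except Exception:
            pass                                # a failed attempt is simply dropped

    deadline = time.time() + max_wait
    while time.time() < deadline:
        t = threading.Thread(target=connector)
        t.daemon = True
        t.start()
        try:
            _, sock = cache.get(timeout=0.2)    # wait briefly for any connector
            return sock
        except Queue.Empty:
            continue
    return None                                 # nothing connected within max_wait
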
Code example #46
File: gae_handler.py Project: cc419378878/XX-Net
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers["Connection"] = "close"
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    if (
                        self.expect_begin < start
                        and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024
                    ):
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers["Range"] = "bytes=%d-%d" % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers, self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning("RangeFetch %s return %r", headers["Range"], response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning(
                        'Range Fetch return %s "%s %s" %s ',
                        response.app_status,
                        self.method,
                        self.url,
                        headers["Range"],
                    )

                    if response.app_status == 404:
                        xlog.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    if response.app_status == 503:
                        xlog.warning("APPID %r out of Quota, remove it temporary.", response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if response.getheader("Location"):
                    self.url = urlparse.urljoin(self.url, response.getheader("Location"))
                    xlog.info("RangeFetch Redirect(%r)", self.url)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader("Content-Range")
                    if not content_range:
                        xlog.warning(
                            'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method,
                            self.url,
                            content_range,
                            response.getheaders(),
                            start,
                            end,
                        )
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(response.getheader("Content-Length", 0))
                    xlog.info(
                        ">>>>>>>>>>>>>>> [thread %s] %s %s",
                        threading.currentThread().ident,
                        content_length,
                        content_range,
                    )

                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers["Range"], e)
                            break

                    if start < end + 1:
                        xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    xlog.info(">>>>>>>>>>>>>>> Successfully reached %d bytes.", start - 1)
                else:
                    xlog.error("RangeFetch %r return %s", self.url, response.status)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                xlog.exception("RangeFetch._fetchlet error:%s", e)
                raise
Code example #47
File: proxy_handler.py Project: 23niu/XX-Net
    def do_CONNECT_DIRECT(self):
        """deploy fake cert to client"""
        host, _, port = self.path.rpartition(':')
        port = int(port)
        if port != 443:
            xlog.warn("CONNECT %s port:%d not support", host, port)
            return

        certfile = CertUtil.get_cert(host)
        xlog.info('GAE %s %s:%d ', self.command, host, port)
        self.__realconnection = None
        self.wfile.write(b'HTTP/1.1 200 OK\r\n\r\n')

        try:
            ssl_sock = ssl.wrap_socket(self.connection, keyfile=certfile, certfile=certfile, server_side=True)
        except ssl.SSLError as e:
            xlog.info('ssl error: %s, create full domain cert for host:%s', e, host)
            certfile = CertUtil.get_cert(host, full_name=True)
            return
        except Exception as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET):
                xlog.exception('ssl.wrap_socket(self.connection=%r) failed: %s path:%s, errno:%s', self.connection, e, self.path, e.args[0])
            return

        self.__realconnection = self.connection
        self.__realwfile = self.wfile
        self.__realrfile = self.rfile
        self.connection = ssl_sock
        self.rfile = self.connection.makefile('rb', self.bufsize)
        self.wfile = self.connection.makefile('wb', 0)

        try:
            self.raw_requestline = self.rfile.readline(65537)
            if len(self.raw_requestline) > 65536:
                self.requestline = ''
                self.request_version = ''
                self.command = ''
                self.send_error(414)
                return
            if not self.raw_requestline:
                self.close_connection = 1
                return
            if not self.parse_request():
                return
        except NetWorkIOError as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
                raise
        if self.path[0] == '/' and host:
            self.path = 'https://%s%s' % (self.headers['Host'], self.path)
        xlog.debug('GAE CONNECT %s %s', self.command, self.path)

        try:
            if self.path[0] == '/' and host:
                self.path = 'http://%s%s' % (host, self.path)
            elif not host and '://' in self.path:
                host = urlparse.urlparse(self.path).netloc

            self.parsed_url = urlparse.urlparse(self.path)
            if len(self.parsed_url[4]):
                path = '?'.join([self.parsed_url[2], self.parsed_url[4]])
            else:
                path = self.parsed_url[2]

            request_headers = dict((k.title(), v) for k, v in self.headers.items())

            payload = b''
            if 'Content-Length' in request_headers:
                try:
                    payload_len = int(request_headers.get('Content-Length', 0))
                    #logging.debug("payload_len:%d %s %s", payload_len, self.command, self.path)
                    payload = self.rfile.read(payload_len)
                except NetWorkIOError as e:
                    xlog.error('handle_method_urlfetch read payload failed:%s', e)
                    return

            direct_handler.handler(self.command, host, path, request_headers, payload, self.wfile)


        except NetWorkIOError as e:
            if e.args[0] not in (errno.ECONNABORTED, errno.ETIMEDOUT, errno.EPIPE):
                raise
        finally:
            if self.__realconnection:
                try:
                    self.__realconnection.shutdown(socket.SHUT_WR)
                    self.__realconnection.close()
                except NetWorkIOError:
                    pass
                finally:
                    self.__realconnection = None
Code example #48
    def create_connection(self, host="", port=443, sock_life=5):
        if port != 443:
            xlog.warn("forward port %d not supported.", port)
            return None

        def _create_connection(ip_port, delay=0):
            time.sleep(delay)
            ip = ip_port[0]
            sock = None
            # start connection time record
            start_time = time.time()
            conn_time = 0
            try:
                # create an IPv4/IPv6 socket object
                if config.PROXY_ENABLE:
                    sock = socks.socksocket(socket.AF_INET if ':' not in
                                            ip else socket.AF_INET6)
                else:
                    sock = socket.socket(socket.AF_INET if ':' not in
                                         ip else socket.AF_INET6)
                # set reuseaddr option to avoid 10048 socket error
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                # resize the socket recv buffer from 8KB to 32KB to improve browser-related application performance
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 32 * 1024)
                # disable Nagle's algorithm to send the HTTP request quickly.
                sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, True)
                # set a short timeout to trigger timeout retry more quickly.
                sock.settimeout(self.timeout)

                # TCP connect
                sock.connect(ip_port)

                # record TCP connection time
                conn_time = time.time() - start_time
                xlog.debug("tcp conn %s time:%d", ip, conn_time * 1000)

                google_ip.update_ip(ip, conn_time * 2000)
                #logging.info("create_tcp update ip:%s time:%d", ip, conn_time * 2000)

                # cache the connected TCP socket for later reuse
                #sock.ip = ip
                self.tcp_connection_cache.put((time.time(), sock))
            except Exception as e:
                conn_time = int((time.time() - start_time) * 1000)
                xlog.debug("tcp conn %s fail t:%d", ip, conn_time)
                google_ip.report_connect_fail(ip)
                #logging.info("create_tcp report fail ip:%s", ip)
                if sock:
                    sock.close()
            finally:
                self.thread_num_lock.acquire()
                self.thread_num -= 1
                self.thread_num_lock.release()

        if host != "appengine.google.com":
            while True:
                try:
                    ctime, sock = self.tcp_connection_cache.get_nowait()
                    if time.time() - ctime < sock_life:
                        return sock
                    else:
                        sock.close()
                        continue
                except Queue.Empty:
                    break

        start_time = time.time()
        while time.time() - start_time < self.max_timeout:

            if self.thread_num < self.max_thread_num:
                if host == "appengine.google.com":
                    ip = google_ip.get_host_ip("*.google.com")
                else:
                    ip = google_ip.get_gws_ip()
                if not ip:
                    xlog.error("no gws ip.")
                    return
                addr = (ip, port)
                self.thread_num_lock.acquire()
                self.thread_num += 1
                self.thread_num_lock.release()
                p = threading.Thread(target=_create_connection, args=(addr, ))
                p.daemon = True
                p.start()

            try:
                ctime, sock = self.tcp_connection_cache.get(timeout=0.2)
                return sock
            except:
                continue
        xlog.warning('create tcp connection fail.')
Code example #49
File: gae_handler.py Project: cc419378878/XX-Net
    def fetch(self):
        response_headers = dict((k.title(), v) for k, v in self.response.getheaders())
        content_range = response_headers["Content-Range"]
        start, end, length = tuple(int(x) for x in re.search(r"bytes (\d+)-(\d+)/(\d+)", content_range).group(1, 2, 3))
        if start == 0:
            response_headers["Content-Length"] = str(length)
            del response_headers["Content-Range"]
        else:
            response_headers["Content-Range"] = "bytes %s-%s/%s" % (start, end, length)
            response_headers["Content-Length"] = str(length - start)

        xlog.info(">>>>>>>>>>>>>>> RangeFetch started(%r) %d-%d", self.url, start, end)

        try:
            self.wfile.write("HTTP/1.1 200 OK\r\n")
            for key in response_headers:
                if key == "Transfer-Encoding":
                    continue
                if key == "X-Head-Content-Length":
                    continue
                if key in skip_headers:
                    continue
                value = response_headers[key]
                # logging.debug("Head %s: %s", key.title(), value)
                send_header(self.wfile, key, value)
            self.wfile.write("\r\n")
        except Exception as e:
            self._stopped = True
            xlog.warn("RangeFetch send response fail:%r %s", e, self.url)
            return

        data_queue = Queue.PriorityQueue()
        range_queue = Queue.PriorityQueue()
        range_queue.put((start, end, self.response))
        self.expect_begin = start
        for begin in range(end + 1, length, self.maxsize):
            range_queue.put((begin, min(begin + self.maxsize - 1, length - 1), None))
        for i in xrange(0, self.threads):
            range_delay_size = i * self.maxsize
            spawn_later(
                float(range_delay_size) / self.waitsize, self.__fetchlet, range_queue, data_queue, range_delay_size
            )

        has_peek = hasattr(data_queue, "peek")
        peek_timeout = 120
        while self.expect_begin < length - 1:
            try:
                if has_peek:
                    begin, data = data_queue.peek(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        data_queue.get()
                    elif self.expect_begin < begin:
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error("RangeFetch Error: begin(%r) < expect_begin(%r), quit.", begin, self.expect_begin)
                        break
                else:
                    begin, data = data_queue.get(timeout=peek_timeout)
                    if self.expect_begin == begin:
                        pass
                    elif self.expect_begin < begin:
                        data_queue.put((begin, data))
                        time.sleep(0.1)
                        continue
                    else:
                        xlog.error("RangeFetch Error: begin(%r) < expect_begin(%r), quit.", begin, self.expect_begin)
                        break
            except Queue.Empty:
                xlog.error("data_queue peek timeout, break")
                break

            try:
                ret = self.wfile.write(data)
                if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                    xlog.debug("send to browser wfile.write ret:%d, retry", ret)
                    ret = self.wfile.write(data)
                    xlog.debug("send to browser wfile.write ret:%d", ret)
                self.expect_begin += len(data)
                del data
            except Exception as e:
                xlog.warn("RangeFetch client closed(%s). %s", e, self.url)
                break
        self._stopped = True
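
The loop in fetch() above reassembles the out-of-order chunks produced by the __fetchlet workers: data arrives in a priority queue keyed by byte offset, and a chunk is written to the client only when its offset matches the expect_begin cursor, while chunks that arrive early are put back and re-checked. A minimal sketch of that reassembly logic, simplified to start at offset 0 instead of the first range's start and using a hypothetical write() callback, looks like this:

import time
import Queue   # Python 2 name; "queue" on Python 3

def reassemble(data_queue, total_length, write, timeout=120):
    expect = 0                                  # next byte offset owed to the client
    while expect < total_length:
        try:
            begin, data = data_queue.get(timeout=timeout)
        except Queue.Empty:
            break                               # stalled: no chunk arrived in time
        if begin > expect:
            data_queue.put((begin, data))       # arrived early, not its turn yet
            time.sleep(0.1)
        elif begin < expect:
            break                               # overlapping data, give up
        else:
            write(data)                         # hypothetical output callback
            expect += len(data)
    return expect                               # bytes actually delivered in order
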