def request_gae_proxy(method, url, headers, body):
    """Send one request through the GAE proxy, retrying until success.

    Packs the request once, then repeatedly calls request_gae_server until
    unpack_response succeeds or 60 seconds have elapsed.

    Args:
        method: HTTP method string.
        url: full request URL (used in log/error messages).
        headers: request header mapping, passed to pack_request.
        body: request body, passed to pack_request.

    Returns:
        The unpacked response object from unpack_response.

    Raises:
        GAE_Exception: with status 600 and all accumulated per-attempt
            error messages once the 60 second deadline is exceeded.
    """
    time_request = time.time()
    request_headers, request_body = pack_request(method, url, headers, body)
    error_msg = []

    while True:
        if time.time() - time_request > 60:
            # Fix: raise GAE_Exception instead of a bare Exception so the
            # timeout is caught by callers that handle GAE_Exception, the
            # same as the other request_gae_proxy variants in this file.
            raise GAE_Exception(600, b"".join(error_msg))

        try:
            response = request_gae_server(request_headers, request_body)
            # Any successful round-trip proves the local network is up.
            check_local_network.report_network_ok()
            response = unpack_response(response)
            return response
        except GAE_Exception as e:
            err_msg = "gae_exception:%r %s" % (e, url)
            error_msg.append(err_msg)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            # Unexpected failure: log with traceback and retry until deadline.
            err_msg = 'gae_handler.handler %r %s , retry...' % (e, url)
            error_msg.append(err_msg)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
def update_ip(self, ip, handshake_time):
    """Record a successful handshake for *ip* and reset its fail state.

    Refreshes the ip's handshake/success times under the ip lock, moves it
    from the bad pool back to the good pool if it had failures, and
    persists the ip list afterwards.
    """
    if not isinstance(ip, basestring):
        xlog.error("set_ip input")
        return

    handshake_time = int(handshake_time)
    if handshake_time < 5:
        # A handshake under 5ms is not physically plausible; drop the sample.
        xlog.warn("%s handshake:%d impossible", ip, 1000 * handshake_time)
        return

    time_now = time.time()
    check_local_network.report_network_ok()
    check_ip.last_check_time = time_now
    check_ip.continue_fail_count = 0

    self.ip_lock.acquire()
    try:
        if ip in self.ip_dict:
            # A good ip averaging ~300ms can spike to ~2000ms on packet
            # loss; cap growth at +500ms per update so one bad sample does
            # not bury a good ip behind genuinely bad ones.
            prev_time = self.ip_dict[ip]['handshake_time']
            if handshake_time - prev_time > 500:
                self.ip_dict[ip]['handshake_time'] = prev_time + 500
            else:
                self.ip_dict[ip]['handshake_time'] = handshake_time

            self.ip_dict[ip]['success_time'] = time_now
            if self.ip_dict[ip]['fail_times'] > 0:
                # The ip just recovered: move it from the bad to the good pool.
                self.good_ip_num += 1
                self.bad_ip_num -= 1
            self.ip_dict[ip]['fail_times'] = 0
            self.append_ip_history(ip, handshake_time)
            self.ip_dict[ip]["fail_time"] = 0

            self.iplist_need_save = True
        # else: ip not tracked — silently ignored
        # (was: xlog.debug("update ip:%s not exist", ip))
    except Exception as e:
        xlog.error("update_ip err:%s", e)
    finally:
        self.ip_lock.release()

    self.save_ip_list()
def update_ip(self, ip, handshake_time):
    """Refresh the stats of *ip* after a successful handshake.

    Smooths the stored handshake time, clears the fail counters, records
    history under the ip lock, then saves the ip list.
    """
    if not isinstance(ip, basestring):
        xlog.error("set_ip input")
        return

    handshake_time = int(handshake_time)
    # Anything below 5ms cannot be a real handshake; ignore the sample.
    if handshake_time < 5:
        xlog.warn("%s handshake:%d impossible", ip, 1000 * handshake_time)
        return

    time_now = time.time()
    check_local_network.report_network_ok()
    check_ip.last_check_time = time_now
    check_ip.continue_fail_count = 0

    with self.ip_lock:
        try:
            record = self.ip_dict.get(ip)
            if record is not None:
                # Packet loss can inflate a single handshake sample from
                # ~300ms to ~2000ms; never let the stored value grow by
                # more than 500ms per update.
                record['handshake_time'] = min(handshake_time,
                                               record['handshake_time'] + 500)
                record['success_time'] = time_now
                if record['fail_times'] > 0:
                    # Recovered ip: count it as good again.
                    self.good_ip_num += 1
                record['fail_times'] = 0
                self.append_ip_history(ip, handshake_time)
                record["fail_time"] = 0

                self.iplist_need_save = True
            # else: unknown ip — deliberately ignored
            # (was: xlog.debug("update ip:%s not exist", ip))
        except Exception as e:
            xlog.error("update_ip err:%s", e)

    self.save_ip_list()
def request_gae_proxy(method, url, headers, body, timeout=60, retry=True):
    """Fetch *url* through the GAE proxy with retry and an overall deadline.

    Args:
        method, url, headers, body: the request, handed to pack_request.
        timeout: overall deadline in seconds for all attempts combined.
        retry: when False, give up after the first failed attempt.

    Returns:
        The unpacked response. On app_status 510 (appid out of quota) the
        failing appid is reported and the request is retried transparently.

    Raises:
        GAE_Exception: status 600 with the accumulated error messages when
            the deadline passes, or immediately when retry is disabled.
    """
    started = time.time()
    request_headers, request_body = pack_request(method, url, headers, body)
    failures = []

    while True:
        if time.time() - started > timeout:
            raise GAE_Exception(600, b"".join(failures))
        if not retry and failures:
            # Caller disabled retrying and the first attempt already failed.
            raise GAE_Exception(600, b"".join(failures))

        try:
            response = request_gae_server(request_headers, request_body,
                                          url, timeout)
            check_local_network.report_network_ok()
            response = unpack_response(response)

            if response.app_msg:
                xlog.warn("server app return fail, status:%d",
                          response.app_status)
                # if len(response.app_msg) < 2048:
                #     xlog.warn('app_msg:%s', cgi.escape(response.app_msg))

                if response.app_status == 510:
                    # The appid reached 80% of its daily traffic quota
                    # (disabled for big-file fetches); report it, drop the
                    # worker and retry on another appid.
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    response.worker.close(
                        "appid out of quota:%s" % response.ssl_sock.appid)
                    continue

            return response
        except GAE_Exception as e:
            note = "gae_exception:%r %s" % (e, url)
            failures.append(note)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            note = 'gae_handler.handler %r %s , retry...' % (e, url)
            failures.append(note)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
def request_gae_proxy(method, url, headers, body):
    """Proxy one HTTP request through GAE, retrying failures for up to 60s.

    Returns the unpacked response; on app_status 510 (appid out of quota)
    the appid is reported, the worker closed, and the request retried.
    Raises GAE_Exception(600, joined error messages) once the deadline
    passes.
    """
    deadline = time.time() + 60
    request_headers, request_body = pack_request(method, url, headers, body)
    error_msg = []

    while True:
        # Overall time out across all retries.
        if time.time() > deadline:
            raise GAE_Exception(600, b"".join(error_msg))

        try:
            response = request_gae_server(request_headers, request_body, url)
            check_local_network.report_network_ok()
            response = unpack_response(response)

            if response.app_msg:
                xlog.warn("server app return fail, status:%d",
                          response.app_status)
                # if len(response.app_msg) < 2048:
                #     xlog.warn('app_msg:%s', cgi.escape(response.app_msg))

                if response.app_status == 510:
                    # Appid hit 80% of today's traffic quota (disabled for
                    # big-file fetches): rotate to another appid and retry.
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    response.worker.close(
                        "appid out of quota:%s" % response.ssl_sock.appid)
                    continue

            return response
        except GAE_Exception as e:
            error_msg.append("gae_exception:%r %s" % (e, url))
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            error_msg.append('gae_handler.handler %r %s , retry...' % (e, url))
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
def fetch_by_gae(method, url, headers, body):
    """Pack a request into the GAE urlfetch wire format, send it, and
    unpack the wrapped response.

    Wire format: deflated request line + headers, length-prefixed with a
    network-order short ('!h'), followed by the (possibly deflated) body.
    The response carries the same short-prefixed, deflated header block.

    Raises:
        GAE_Exception: when the response header block cannot be unpacked.
    """
    if isinstance(body, basestring) and body:
        if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers:
            # Only keep the deflated body when it actually shrinks.
            zbody = deflate(body)
            if len(zbody) < len(body):
                body = zbody
                headers['Content-Encoding'] = 'deflate'
        if len(body) > 10 * 1024 * 1024:
            xlog.warn("body len:%d %s %s", len(body), method, url)
        headers['Content-Length'] = str(len(body))

    # GAE don't allow set `Host` header
    if 'Host' in headers:
        del headers['Host']

    kwargs = {}
    if config.GAE_PASSWORD:
        kwargs['password'] = config.GAE_PASSWORD
    # kwargs['options'] =
    # kwargs['validate'] =
    kwargs['maxsize'] = config.AUTORANGE_MAXSIZE
    kwargs['timeout'] = '19'

    # Inner request line + headers, then the X-URLFETCH control headers.
    payload = '%s %s HTTP/1.1\r\n' % (method, url)
    payload += ''.join('%s: %s\r\n' % (k, v)
                       for k, v in headers.items() if k not in skip_headers)
    # for k, v in headers.items():
    #     xlog.debug("Send %s: %s", k, v)
    payload += ''.join('X-URLFETCH-%s: %s\r\n' % (k, v)
                       for k, v in kwargs.items() if v)

    request_headers = {}
    payload = deflate(payload)

    # '!h' length prefix + deflated headers + body.
    body = '%s%s%s' % (struct.pack('!h', len(payload)), payload, body)
    request_headers['Content-Length'] = str(len(body))

    response = http_dispatch.request(request_headers, body)
    response.app_status = response.status
    response.app_headers = response.headers
    if response.app_status != 200:
        return response

    check_local_network.report_network_ok()

    try:
        data = response.body.get(2)
        if len(data) < 2:
            xlog.warn("fetch too short lead byte len:%d %s", len(data), url)
            response.app_status = 502  # 502: Bad gateway
            response.fp = io.BytesIO(
                b'connection aborted. too short lead byte data=' + data)
            response.read = response.fp.read
            return response

        headers_length, = struct.unpack('!h', data)
        data = response.body.get(headers_length)
        if len(data) < headers_length:
            xlog.warn("fetch too short header need:%d get:%d %s",
                      headers_length, len(data), url)
            response.app_status = 509
            response.fp = io.BytesIO(
                b'connection aborted. too short headers data=' + data)
            response.read = response.fp.read
            return response

        raw_response_line, headers_data = inflate(data).split('\r\n', 1)
        _, response.status, response.reason = raw_response_line.split(None, 2)
        response.status = int(response.status)
        response.reason = response.reason.strip()

        headers_pairs = headers_data.split('\r\n')
        response.headers = {}
        for pair in headers_pairs:
            if not pair:
                # Blank line terminates the header block.
                break
            k, v = pair.split(': ', 1)
            response.headers[k] = v

        return response
    except Exception as e:
        raise GAE_Exception("unpack protocol:%r", e)
def fetch_by_gae(method, url, headers, body):
    """Send one request through the GAE urlfetch gateway.

    The real request is serialized as a deflated HTTP request line plus
    headers, prefixed by a 2-byte network-order length, with the body
    appended; the gateway answers with the same framing, which this
    function decodes back into response.status/reason/headers.

    Raises:
        GAE_Exception: if the wrapped response cannot be decoded.
    """
    if isinstance(body, basestring) and body:
        if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers:
            compressed = deflate(body)
            # Use the compressed form only if it is an actual win.
            if len(compressed) < len(body):
                body = compressed
                headers['Content-Encoding'] = 'deflate'
        if len(body) > 10 * 1024 * 1024:
            xlog.warn("body len:%d %s %s", len(body), method, url)
        headers['Content-Length'] = str(len(body))

    # GAE don't allow set `Host` header
    headers.pop('Host', None)

    kwargs = {}
    if config.GAE_PASSWORD:
        kwargs['password'] = config.GAE_PASSWORD
    # kwargs['options'] =
    # kwargs['validate'] =
    kwargs['maxsize'] = config.AUTORANGE_MAXSIZE
    kwargs['timeout'] = '19'

    inner = '%s %s HTTP/1.1\r\n' % (method, url)
    inner += ''.join('%s: %s\r\n' % (k, v)
                     for k, v in headers.items() if k not in skip_headers)
    # for k, v in headers.items():
    #     xlog.debug("Send %s: %s", k, v)
    inner += ''.join('X-URLFETCH-%s: %s\r\n' % (k, v)
                     for k, v in kwargs.items() if v)

    request_headers = {}
    inner = deflate(inner)

    body = '%s%s%s' % (struct.pack('!h', len(inner)), inner, body)
    request_headers['Content-Length'] = str(len(body))

    response = http_dispatch.request(request_headers, body)
    response.app_status = response.status
    response.app_headers = response.headers
    if response.app_status != 200:
        # Gateway-level failure; hand it back untouched.
        return response

    check_local_network.report_network_ok()

    try:
        prefix = response.body.get(2)
        if len(prefix) < 2:
            xlog.warn("fetch too short lead byte len:%d %s", len(prefix), url)
            response.app_status = 502  # 502: Bad gateway
            response.fp = io.BytesIO(
                b'connection aborted. too short lead byte data=' + prefix)
            response.read = response.fp.read
            return response

        headers_length, = struct.unpack('!h', prefix)
        block = response.body.get(headers_length)
        if len(block) < headers_length:
            xlog.warn("fetch too short header need:%d get:%d %s",
                      headers_length, len(block), url)
            response.app_status = 509
            response.fp = io.BytesIO(
                b'connection aborted. too short headers data=' + block)
            response.read = response.fp.read
            return response

        status_line, header_text = inflate(block).split('\r\n', 1)
        _, response.status, response.reason = status_line.split(None, 2)
        response.status = int(response.status)
        response.reason = response.reason.strip()

        response.headers = {}
        for line in header_text.split('\r\n'):
            if not line:
                # Empty line marks the end of the header section.
                break
            name, value = line.split(': ', 1)
            response.headers[name] = value

        return response
    except Exception as e:
        raise GAE_Exception("unpack protocol:%r", e)