def fetch(method, host, path, headers, payload):
    """Send one HTTP/1.1 request over a pooled SSL connection to `host`.

    Builds the raw request text, takes a connection from https_manager,
    writes headers then the payload in <=65535-byte chunks, and parses the
    reply with httplib.HTTPResponse.

    Returns the HTTPResponse (with .ssl_sock attached) on success, or None
    when no connection is available or the request/response fails.
    """
    request_data = '%s %s HTTP/1.1\r\n' % (method, path)
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'
    ssl_sock = https_manager.get_ssl_connection(host)
    if not ssl_sock:
        # no usable connection; implicit None return
        return
    try:
        ssl_sock.send(request_data.encode())
        # send payload in chunks; send() may write less than asked, so
        # advance by the number of bytes actually sent
        payload_len = len(payload)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = ssl_sock.send(payload[start:start + send_size])
            start += sended
        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.ssl_sock = ssl_sock
        # temporarily widen the timeout while waiting for the status line,
        # then restore the caller's timeout
        orig_timeout = ssl_sock.gettimeout()
        ssl_sock.settimeout(90)
        response.begin()
        ssl_sock.settimeout(orig_timeout)
    except httplib.BadStatusLine as e:
        # server closed or sent garbage before the status line
        xlog.warn("direct_handler.fetch bad status line:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    except Exception as e:
        xlog.warn("direct_handler.fetch:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    return response
def keep_alive_thread(self):
    """Background loop that keeps pooled SSL connections warm.

    While keep-alive is enabled and the global controller is running, once
    per second: sockets from the new/gae pools nearing their keep-alive
    deadline are either closed (idle longer than self.keep_alive) or handed
    to start_keep_alive(); host-bound pool entries are only expired.
    """
    while self.keep_alive and connect_control.keep_running:
        if not connect_control.is_active():
            # network suspended; just idle
            time.sleep(1)
            continue

        margin = self.keep_alive - 3
        candidates = (self.new_conn_pool.get_need_keep_alive(maxtime=margin)
                      + self.gae_conn_pool.get_need_keep_alive(maxtime=margin))
        for sock in candidates:
            idle = time.time() - sock.last_use_time
            if idle > self.keep_alive:
                google_ip.report_connect_closed(sock.ip, "alive_timeout")
                sock.close()
            else:
                self.start_keep_alive(sock)

        for pool_host in self.host_conn_pool:
            expired = self.host_conn_pool[pool_host].get_need_keep_alive(maxtime=margin)
            for sock in expired:
                google_ip.report_connect_closed(sock.ip, "host pool alive_timeout")
                sock.close()

        time.sleep(1)
def keep_alive_thread(self):
    """Keep-alive loop: hand soon-to-expire pooled connections to the
    ssl_timeout_cb worker; close connections already idle past
    self.keep_alive (or all of them when no callback is set).  Host-bound
    pool entries are only expired, never refreshed.  Runs once per second
    while connect_control.keep_running.
    """
    while connect_control.keep_running:
        if not connect_control.is_active():
            # network suspended; just idle
            time.sleep(1)
            continue
        to_keep_live_list = self.new_conn_pool.get_need_keep_alive(
            maxtime=self.keep_alive - 3)
        for ssl_sock in to_keep_live_list:
            inactive_time = time.time() - ssl_sock.last_use_time
            if inactive_time > self.keep_alive or not self.ssl_timeout_cb:
                google_ip.report_connect_closed(ssl_sock.ip, "alive_timeout")
                ssl_sock.close()
            else:
                # put ssl to worker
                try:
                    self.ssl_timeout_cb(ssl_sock)
                except Exception as e:
                    # FIX: was a bare `except:` that silently swallowed every
                    # exception (including SystemExit/KeyboardInterrupt).
                    # Narrowed to Exception and logged, matching the sibling
                    # keep_alive_thread variant.  Typical cause: no appid
                    # available.
                    xlog.exception("ssl_timeout_cb except:%r", e)
        for host in self.host_conn_pool:
            host_list = self.host_conn_pool[host].get_need_keep_alive(
                maxtime=self.keep_alive - 3)
            for ssl_sock in host_list:
                google_ip.report_connect_closed(ssl_sock.ip,
                                                "host pool alive_timeout")
                ssl_sock.close()
        time.sleep(1)
def keep_alive_worker(self, sock):
    """Probe `sock` with a HEAD request; return it to the reuse pool on
    success (stamped with the probe start time), otherwise report and close.
    """
    probe_started = time.time()
    if not self.head_request(sock):
        google_ip.report_connect_closed(sock.ip, "HEAD")
        sock.close()
        return
    self.save_ssl_connection_for_reuse(sock, call_time=probe_started)
def keep_alive_thread(self):
    """Keep-alive loop: hand soon-to-expire pooled connections to the
    ssl_timeout_cb worker; close connections already idle past
    self.keep_alive (or all, when no callback is set).  Host-bound pool
    entries are only expired.  Runs once per second while
    connect_control.keep_running.
    """
    while connect_control.keep_running:
        if not connect_control.is_active():
            # network suspended; just idle
            time.sleep(1)
            continue
        to_keep_live_list = self.new_conn_pool.get_need_keep_alive(
            maxtime=self.keep_alive - 3)
        for ssl_sock in to_keep_live_list:
            inactive_time = time.time() - ssl_sock.last_use_time
            if inactive_time > self.keep_alive or not self.ssl_timeout_cb:
                google_ip.report_connect_closed(ssl_sock.ip, "alive_timeout")
                ssl_sock.close()
            else:
                # put ssl to worker
                try:
                    self.ssl_timeout_cb(ssl_sock)
                except Exception as e:
                    xlog.exception("ssl_timeout_cb except:%r", e)
                    # no appid avaiable
                    pass
        for host in self.host_conn_pool:
            host_list = self.host_conn_pool[host].get_need_keep_alive(
                maxtime=self.keep_alive - 3)
            for ssl_sock in host_list:
                google_ip.report_connect_closed(ssl_sock.ip,
                                                "host pool alive_timeout")
                ssl_sock.close()
        time.sleep(1)
def keep_alive_thread(self):
    """Keep-alive loop for the new/gae connection pools.

    While keep-alive is enabled and the global controller is running, once
    per second: connections nearing the keep-alive deadline are closed if
    idle longer than self.keep_alive, otherwise refreshed via
    start_keep_alive().  Host-bound pool entries are only expired.
    """
    while self.keep_alive and connect_control.keep_running:
        if not connect_control.is_active():
            # network suspended; just idle
            time.sleep(1)
            continue
        new_list = self.new_conn_pool.get_need_keep_alive(maxtime=self.keep_alive-3)
        old_list = self.gae_conn_pool.get_need_keep_alive(maxtime=self.keep_alive-3)
        to_keep_live_list = new_list + old_list
        for ssl_sock in to_keep_live_list:
            inactive_time = time.time() - ssl_sock.last_use_time
            if inactive_time > self.keep_alive:
                google_ip.report_connect_closed(ssl_sock.ip, "alive_timeout")
                ssl_sock.close()
            else:
                self.start_keep_alive(ssl_sock)
        for host in self.host_conn_pool:
            host_list = self.host_conn_pool[host].get_need_keep_alive(maxtime=self.keep_alive-3)
            for ssl_sock in host_list:
                google_ip.report_connect_closed(ssl_sock.ip, "host pool alive_timeout")
                ssl_sock.close()
        #self.create_more_connection()
        time.sleep(1)
def fetch(method, host, path, headers, payload, bufsize=8192):
    """Issue one HTTP/1.1 request to `host` over a pooled SSL connection.

    `bufsize` is accepted for interface compatibility but not used here.
    Returns the parsed HTTPResponse (with .ssl_sock attached), or None when
    no connection is available or the exchange fails.
    """
    parts = ['%s %s HTTP/1.1\r\n' % (method, path)]
    for name, value in headers.items():
        parts.append('%s: %s\r\n' % (name, value))
    parts.append('\r\n')
    request_data = ''.join(parts)

    ssl_sock = https_manager.get_ssl_connection(host)
    if not ssl_sock:
        return

    try:
        ssl_sock.send(request_data.encode())
        # write the payload in chunks of at most 65535 bytes, advancing by
        # the count actually accepted by send()
        total = len(payload)
        sent = 0
        while sent < total:
            chunk_len = min(total - sent, 65535)
            sent += ssl_sock.send(payload[sent:sent + chunk_len])

        response = httplib.HTTPResponse(ssl_sock, buffering=True)
        response.ssl_sock = ssl_sock
        # widen the timeout while waiting for the status line, then restore
        saved_timeout = ssl_sock.gettimeout()
        ssl_sock.settimeout(90)
        response.begin()
        ssl_sock.settimeout(saved_timeout)
    except httplib.BadStatusLine as e:
        xlog.warn("direct_handler.fetch bad status line:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    except Exception as e:
        xlog.warn("direct_handler.fetch:%r", e)
        google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
        response = None
    return response
def _request(self, headers, payload):
    """Send one GAE POST over this worker's SSL socket and read the reply.

    Returns the HTTPResponse on success, with .headers, .body (a ReadBuffer
    of the full body), .ssl_sock and .worker attached; returns False on any
    failure.  The body is read to exactly Content-Length bytes, with a 20s
    stall timeout.
    """
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'
    try:
        self.ssl_sock.send(request_data.encode())
        # send payload in <=65535-byte chunks; send() may accept fewer bytes
        payload_len = len(payload)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = self.ssl_sock.send(payload[start:start+send_size])
            start += sended
        response = httplib.HTTPResponse(self.ssl_sock, buffering=True)
        self.ssl_sock.settimeout(100)
        response.begin()
        # read response body,
        body_length = int(response.getheader("Content-Length", "0"))
        start = 0
        end = body_length
        last_read_time = time.time()
        time_response = time.time()
        response_body = []
        while True:
            if start >= end:
                # full body received: attach metadata and hand back
                self.ssl_sock.received_size += body_length
                response.headers = response.msg.dict
                response.body = ReadBuffer(b''.join(response_body))
                response.ssl_sock = self.ssl_sock
                response.worker = self
                return response
            to_read = end - start
            data = response.read(to_read)
            if not data:
                if time.time() - last_read_time > 20:
                    # stalled >20s: give up on this connection
                    google_ip.report_connect_closed(self.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("%s read timeout t:%d len:%d left:%d ", self.ip,
                              (time.time()-time_response)*1000, body_length,
                              (end-start))
                    return False
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            response_body.append(data)
    except httplib.BadStatusLine as e:
        xlog.warn("%s _request bad status line:%r", self.ip, e)
        pass
    except Exception as e:
        xlog.warn("%s _request:%r", self.ip, e)
    return False
def get_ssl_connection(self, host=''):
    """Return a reusable SSL connection, preferring a pooled one.

    With `host` set, drain the host-bound pool (HTTP/1-only sockets);
    otherwise drain the shared gae pool.  Pooled sockets idle for more than
    keep_alive+1 seconds are reported and closed rather than reused.  When
    no pooled socket survives, block up to max_timeout on the new-connection
    pool.  Always triggers create_more_connection() to refill in background.
    Returns None when nothing is available in time.
    """
    ssl_sock = None
    if host:
        only_h1 = True
        if host in self.host_conn_pool:
            while True:
                ret = self.host_conn_pool[host].get_nowait(only_h1=True)
                if ret:
                    handshake_time, ssl_sock = ret
                else:
                    # pool drained without a usable socket
                    ssl_sock = None
                    break
                if time.time(
                ) - ssl_sock.last_use_time < self.keep_alive + 1:
                    xlog.debug("host_conn_pool %s get:%s handshake:%d", host,
                               ssl_sock.ip, handshake_time)
                    break
                else:
                    # too old to trust; discard and try the next one
                    google_ip.report_connect_closed(
                        ssl_sock.ip, "get_timeout")
                    ssl_sock.close()
                    continue
    else:
        only_h1 = False
        while True:
            ret = self.gae_conn_pool.get_nowait()
            if ret:
                handshake_time, ssl_sock = ret
            else:
                ssl_sock = None
                break
            if time.time() - ssl_sock.last_use_time < self.keep_alive + 1:
                xlog.debug("ssl_pool.get:%s handshake:%d", ssl_sock.ip,
                           handshake_time)
                break
            else:
                google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                ssl_sock.close()
                continue
    # refill the pool in the background regardless of the outcome
    self.create_more_connection()
    if ssl_sock:
        return ssl_sock
    else:
        # fall back to waiting for a freshly created connection
        ret = self.new_conn_pool.get(True, self.max_timeout, only_h1=only_h1)
        if ret:
            handshake_time, ssl_sock = ret
            return ssl_sock
        else:
            xlog.debug("create ssl timeout fail.")
            return None
def get_ssl_connection(self, host=''):
    """Return a reusable SSL connection (variant of the sibling
    get_ssl_connection; same logic, different formatting in the original).

    With `host`, drain the host-bound pool (HTTP/1-only); otherwise drain
    the shared gae pool.  Sockets idle beyond keep_alive+1 seconds are
    closed rather than reused.  Falls back to blocking on the
    new-connection pool for up to max_timeout; returns None on timeout.
    """
    ssl_sock = None
    if host:
        only_h1=True
        if host in self.host_conn_pool:
            while True:
                ret = self.host_conn_pool[host].get_nowait(only_h1=True)
                if ret:
                    handshake_time, ssl_sock = ret
                else:
                    # pool drained without a usable socket
                    ssl_sock = None
                    break
                if time.time() - ssl_sock.last_use_time < self.keep_alive+1:
                    xlog.debug("host_conn_pool %s get:%s handshake:%d", host, ssl_sock.ip, handshake_time)
                    break
                else:
                    # too old to trust; discard and try the next one
                    google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                    ssl_sock.close()
                    continue
    else:
        only_h1=False
        while True:
            ret = self.gae_conn_pool.get_nowait()
            if ret:
                handshake_time, ssl_sock = ret
            else:
                ssl_sock = None
                break
            if time.time() - ssl_sock.last_use_time < self.keep_alive+1:
                xlog.debug("ssl_pool.get:%s handshake:%d", ssl_sock.ip, handshake_time)
                break
            else:
                google_ip.report_connect_closed(ssl_sock.ip, "get_timeout")
                ssl_sock.close()
                continue
    # refill the pool in the background regardless of the outcome
    self.create_more_connection()
    if ssl_sock:
        return ssl_sock
    else:
        # fall back to waiting for a freshly created connection
        ret = self.new_conn_pool.get(True, self.max_timeout, only_h1=only_h1)
        if ret:
            handshake_time, ssl_sock = ret
            return ssl_sock
        else:
            xlog.debug("create ssl timeout fail.")
            return None
def request_task(self, task):
    """Execute one queued task on this worker's connection.

    On failure the IP is reported, the task is requeued via retry_task_cb
    and the worker is closed; on success the response is delivered to the
    task's queue and the worker becomes available again.
    """
    request_headers = task.headers
    request_headers['Host'] = self.ssl_sock.host
    response = self._request(request_headers, task.body)
    if not response:
        # request failed: report, hand the task back, retire this worker
        google_ip.report_connect_closed(self.ssl_sock.ip, "request_fail")
        self.retry_task_cb(task)
        self.close("request fail")
        return
    task.queue.put(response)
    self.accept_task = True
    self.processed_tasks += 1
def save_ssl_connection_for_reuse(self, ssl_sock, host=None, call_time=0):
    """Return a finished SSL connection to a reuse pool.

    Stamps last_use_time (with `call_time` when given, otherwise now).
    Host-bound sockets go to that host's pool (created on demand); others
    go to the shared gae pool, which is trimmed back to
    connection_pool_max_num by evicting the slowest-handshake sockets.
    """
    ssl_sock.last_use_time = call_time if call_time else time.time()
    entry = (ssl_sock.handshake_time, ssl_sock)

    if host:
        if host not in self.host_conn_pool:
            self.host_conn_pool[host] = Connect_pool()
        self.host_conn_pool[host].put(entry)
        return

    self.gae_conn_pool.put(entry)
    # keep the shared pool bounded: drop the slowest handshakes first
    while self.gae_conn_pool.qsize() > self.connection_pool_max_num:
        _, victim = self.gae_conn_pool.get_slowest()
        google_ip.report_connect_closed(victim.ip,
                                        "slowest %d" % victim.handshake_time)
        victim.close()
def request(headers=None, payload=None):
    """Send a GAE request, retrying up to three times.

    FIX: the original signature was `headers={}` — a mutable default that
    this function mutates (`headers['Host'] = ...`), so the Host header of
    one call leaked into every later call that omitted `headers`.  A fresh
    dict is now created per call; passing a dict behaves exactly as before.

    Binds an appid to fresh connections (raising GAE_Exception(1, ...) when
    none is available), delegates to _request, and returns the response
    with .ssl_sock attached.  Raises GAE_Exception(2, ...) after max_retry
    failed attempts.
    """
    if headers is None:
        headers = {}
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue
            if ssl_sock.host == '':
                # fresh connection: bind it to an appid
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host
            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue
            response.ssl_sock = ssl_sock
            return response
        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
def _request(self, headers, payload):
    """Send one GAE POST over this worker's SSL socket and read the reply.

    Variant of the sibling _request: the body loop uses
    end = body_length - 1 with a `start > end` test (i.e. stops once
    start >= body_length) and reads fixed 65535-byte chunks.  Returns the
    HTTPResponse (with .headers, .body as a ReadBuffer, .ssl_sock and
    .worker attached) on success, False on failure.
    """
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'
    try:
        self.ssl_sock.send(request_data.encode())
        # send payload in <=65535-byte chunks; send() may accept fewer bytes
        payload_len = len(payload)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = self.ssl_sock.send(payload[start:start + send_size])
            start += sended
        response = httplib.HTTPResponse(self.ssl_sock, buffering=True)
        self.ssl_sock.settimeout(100)
        response.begin()
        # read response body,
        body_length = int(response.getheader("Content-Length", "0"))
        start = 0
        end = body_length - 1
        last_read_time = time.time()
        time_response = time.time()
        response_body = []
        while True:
            if start > end:
                # full body received: attach metadata and hand back
                self.ssl_sock.received_size += body_length
                response.headers = response.msg.dict
                response.body = ReadBuffer(b''.join(response_body))
                response.ssl_sock = self.ssl_sock
                response.worker = self
                return response
            data = response.read(65535)
            if not data:
                if time.time() - last_read_time > 20:
                    # stalled >20s: give up on this connection
                    google_ip.report_connect_closed(
                        self.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("%s read timeout t:%d len:%d left:%d ",
                              self.ip, (time.time() - time_response) * 1000,
                              body_length, (end - start))
                    return False
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            response_body.append(data)
    except httplib.BadStatusLine as e:
        xlog.warn("%s _request bad status line:%r", self.ip, e)
        pass
    except Exception as e:
        xlog.warn("%s _request:%r", self.ip, e)
    return False
# NOTE(review): fragment of a direct-fetch response relay loop; the enclosing
# function definition (and the tail of the `try` around wfile.write) lie
# outside this view.  Names such as response, host, url, wfile,
# send_to_browser and time_request are presumably bound by that function —
# confirm against the full file.
if content_range:
    # parse "bytes start-end/total" from the Content-Range header
    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
else:
    start, end, length = 0, content_length-1, content_length
time_last_read = time.time()
while True:
    if start > end:
        # whole range relayed: recycle the connection and log the transfer
        https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host, call_time=time_request)
        xlog.info("DIRECT t:%d s:%d %d %s %s", (time.time()-time_request)*1000, length, response.status, host, url)
        return
    data = response.read(config.AUTORANGE_BUFSIZE)
    if not data:
        if time.time() - time_last_read > 20:
            # stalled >20s: report the IP and abort the relay
            google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
            response.close()
            xlog.warn("read timeout t:%d len:%d left:%d %s %s", (time.time()-time_request)*1000, length, (end-start), host, url)
            return
        else:
            time.sleep(0.1)
            continue
    time_last_read = time.time()
    data_len = len(data)
    start += data_len
    if send_to_browser:
        try:
            ret = wfile.write(data)
            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                xlog.debug("send to browser wfile.write ret:%d", ret)
# NOTE(review): fragment ends mid-`try`; the retry write and its `except`
# continue beyond this view.
def handler(method, host, url, headers, body, wfile):
    """Relay one direct (non-GAE) request to `host` and stream the reply
    to the browser via `wfile`.

    Retries fetch() for up to 30 seconds, discarding IPs whose >400
    responses don't look like Google frontends.  NOTE(review): this block
    is truncated — the non-chunked branch and the outer `except` handlers
    continue beyond this view, so the outer `try` is not closed here.
    """
    time_request = time.time()
    if "Connection" in headers and headers["Connection"] == "close":
        del headers["Connection"]
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            # overall deadline exceeded
            return return_fail_message(wfile)
        try:
            response = fetch(method, host, url, headers, body)
            if response:
                if response.status > 400:
                    # error status from something that isn't a Google
                    # frontend: drop this IP permanently and retry
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s status:%d", response.ssl_sock.ip, server_type, response.status)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                break
        except OpenSSL.SSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)
    try:
        send_to_browser = True
        try:
            # forward status line and headers to the browser
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            # browser went away: keep draining the upstream, stop writing
            send_to_browser = False
            wait_time = time.time()-time_request
            xlog.warn("direct_handler.handler send response fail. \nt:%d e:%r %s%s", wait_time, e, host, url)
        if method == 'HEAD' or response.status in (204, 304):
            # no body expected: recycle the connection immediately
            xlog.info("DIRECT t:%d %d %s %s", (time.time()-time_request)*1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return
        if 'Transfer-Encoding' in response_headers:
            # chunked upstream: re-chunk manually toward the browser
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead, e:
                    data = e.partial
                except Exception as e:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
                    xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                    response.close()
                    return
                if send_to_browser:
                    try:
                        if not data:
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                else:
                    if not data:
                        break
# NOTE(review): function continues beyond this view (Content-Length branch
# and the outer try's except handlers are not shown).
def handler(method, url, headers, body, wfile):
    """Relay one request through GAE and stream the reply to the browser.

    Retries fetch() for up to 30s, handling GAE-level statuses: 404 (appid
    gone), 403/405 (IP unusable), 503 (appid out of quota).  A 206 result
    is delegated to RangeFetch.  Then forwards status/headers/body to
    `wfile`, recycling the SSL connection when the body completes.
    """
    time_request = time.time()
    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            #time out
            return return_fail_message(wfile)
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)
                try:
                    # non-Google frontends cannot serve GAE: drop the IP
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue
            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status == 403 or response.app_status == 405:
                #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue
            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it. \n%s', response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status < 500:
                break
        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
    if response.status == 206:
        # partial content: delegate to the multi-range fetcher
        return RangeFetch(method, url, headers, body, response, wfile).fetch()
    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value
        if 'X-Head-Content-Length' in response_headers:
            # GAE smuggles the real HEAD length in a custom header
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']
        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # browser went away: keep draining upstream, stop writing
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. \nt:%d e:%r %s", time.time()-time_request, e, url)
        if len(response.app_msg):
            # GAE-level error page takes the place of the body
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return
        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length
        body_length = end - start + 1
        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # body complete: log (with speed for larger bodies) and
                # return the connection to the reuse pool
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time,
                              int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished-time_request)*1000, length, response.ssl_sock.handshake_time,
                              response.status, url)
                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    # stalled >20s: report and abort
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # non-blocking SSL wants a retry of the same write
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def handler(method, url, headers, body, wfile):
    """Relay one request through GAE (second variant of the sibling
    handler: adds a good/bad IP count log on success and builds
    response_headers before writing the status line).

    Retries fetch() for up to 30s, handling GAE-level statuses 404/403/405/
    503; 206 is delegated to RangeFetch; then streams the body to `wfile`
    and recycles the SSL connection on completion.
    """
    time_request = time.time()
    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:
            #time out
            return return_fail_message(wfile)
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)
                try:
                    # non-Google frontends cannot serve GAE: drop the IP
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue
            else:
                xlog.info(
                    'good ip num:%d, bad ip num:%s',
                    google_ip.max_good_ip_num if google_ip.good_ip_num > google_ip.max_good_ip_num else google_ip.good_ip_num,
                    google_ip.bad_ip_num)
            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status == 403 or response.app_status == 405:
                #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue
            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it. \n%s', response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota")
                appid = appid_manager.get_appid()
                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue
            if response.app_status < 500:
                break
        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
    if response.status == 206:
        # partial content: delegate to the multi-range fetcher
        return RangeFetch(method, url, headers, body, response, wfile).fetch()
    try:
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value
        if 'X-Head-Content-Length' in response_headers:
            # GAE smuggles the real HEAD length in a custom header
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers[
                    'X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']
        send_to_browser = True
        try:
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # browser went away: keep draining upstream, stop writing
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. \nt:%d e:%r %s", time.time() - time_request, e, url)
        if len(response.app_msg):
            # GAE-level error page takes the place of the body
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return
        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length
        body_length = end - start + 1
        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # body complete: log (with speed for larger bodies) and
                # return the connection to the reuse pool
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length, response.ssl_sock.handshake_time,
                              int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                              response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size,
                              (time_finished - time_request) * 1000, length, response.ssl_sock.handshake_time,
                              response.status, url)
                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(
                    response.ssl_sock, call_time=time_request)
                return
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    # stalled >20s: report and abort
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time() - time_request) * 1000, length, (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        # non-blocking SSL wants a retry of the same write
                        xlog.debug("send to browser \nwfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False
    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
def request_task(self, task):
    """Run one GAE task over this HTTP/1 worker's SSL socket.

    Sends the POST, delivers the parsed response head to the task queue as
    soon as it arrives, then streams the Content-Length body into the task
    via task.put_data().  On request failure the task is requeued and the
    worker closed; on body failure the task gets an empty sentinel chunk
    and the worker is closed.

    FIX: the body loop computed `to_read = max(end - start, 65535)`, which
    always asks for at least 64KB — even when only a few bytes remain — and
    relies on httplib capping the read.  Changed to
    `min(end - start, 65535)` (never request past the remaining body),
    matching the chunking used by the sibling `_request` workers.
    """
    task.set_state("h1_req")
    task.headers['Host'] = self.ssl_sock.host
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in task.headers.items())
    request_data += '\r\n'
    try:
        self.ssl_sock.send(request_data.encode())
        # send the body in <=65535-byte chunks; send() may accept fewer
        payload_len = len(task.body)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = self.ssl_sock.send(task.body[start:start+send_size])
            start += sended
        response = httplib.HTTPResponse(self.ssl_sock, buffering=True)
        self.ssl_sock.settimeout(100)
        response.begin()
    #except httplib.BadStatusLine as e:
    #    xlog.warn("%s _request bad status line:%r", self.ip, e)
    except Exception as e:
        # request failed: hand the task back and retire this worker
        xlog.warn("%s h1_request:%r", self.ip, e)
        google_ip.report_connect_closed(self.ssl_sock.ip, "request_fail")
        self.retry_task_cb(task)
        self.task = None
        self.close("request fail")
        return

    task.set_state("h1_get_head")
    body_length = int(response.getheader("Content-Length", "0"))
    task.content_length = body_length
    task.responsed = True
    response.headers = response.msg.dict
    response.worker = self
    response.task = task
    response.ssl_sock = self.ssl_sock
    # deliver the response head immediately; body chunks follow via put_data
    task.queue.put(response)
    if body_length == 0:
        self.accept_task = True
        self.processed_tasks += 1
        return

    # read response body,
    try:
        start = 0
        end = body_length
        time_response = last_read_time = time.time()
        while True:
            if start >= end:
                # body complete: report speed and make the worker idle
                self.ssl_sock.received_size += body_length
                time_cost = (time.time() - time_response)
                if time_cost != 0:
                    speed = body_length / time_cost
                    task.set_state("h1_finish[SP:%d]" % speed)
                    self.report_speed(speed, body_length)
                self.task = None
                self.accept_task = True
                self.idle_cb()
                self.processed_tasks += 1
                self.last_active_time = time.time()
                return
            # FIX: was max(end - start, 65535) — see docstring
            to_read = min(end - start, 65535)
            data = response.read(to_read)
            if not data:
                if time.time() - last_read_time > 20:
                    # stalled >20s: abandon the body
                    xlog.warn("%s read timeout t:%d expect:%d left:%d ",
                              self.ip, (time.time()-time_response)*1000,
                              body_length, (end-start))
                    break
                else:
                    time.sleep(0.1)
                    continue
            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            task.put_data(data)
    except Exception as e:
        xlog.warn("%s h1_request:%r", self.ip, e)
        # empty chunk signals end-of-body to the task's reader
        task.put_data("")
        google_ip.report_connect_closed(self.ssl_sock.ip, "down fail")
        self.close("request body fail")
def handler(method, host, url, headers, body, wfile):
    """Fetch (method, url) from host over a direct connection and relay
    the response to the browser through wfile.

    Retries fetch() for up to 30 seconds, discarding IPs whose Server
    header does not look like a Google frontend, then streams either a
    chunked (Transfer-Encoding) or plain body back to the client.
    """
    time_request = time.time()

    # A client "Connection: close" must not be forwarded upstream.
    if "Connection" in headers and headers["Connection"] == "close":
        del headers["Connection"]

    errors = []
    response = None
    while True:
        # Give up after 30 seconds of failed attempts.
        if time.time() - time_request > 30:
            return return_fail_message(wfile)

        try:
            response = fetch(method, host, url, headers, body)
            if response:
                if response.status > 400:
                    # An error status from a non-Google server means this
                    # IP does not front GAE at all: remove it and retry.
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s status:%d", response.ssl_sock.ip, server_type, response.status)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                break
        except OpenSSL.SSL.SysCallError as e:
            errors.append(e)
            xlog.warn("direct_handler.handler err:%r %s/%s", e, host, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('direct_handler.handler %r %s %s , retry...', e, host, url)

    try:
        # send_to_browser goes False once a write to wfile fails; the
        # body is then still drained from the server but not forwarded.
        send_to_browser = True
        try:
            response_headers = dict((k.title(), v) for k, v in response.getheaders())
            wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
            for key, value in response.getheaders():
                send_header(wfile, key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            wait_time = time.time()-time_request
            xlog.warn("direct_handler.handler send response fail. t:%d e:%r %s%s", wait_time, e, host, url)

        # HEAD / 204 / 304 responses carry no body: recycle the SSL
        # connection and finish.
        if method == 'HEAD' or response.status in (204, 304):
            xlog.info("DIRECT t:%d %d %s %s", (time.time()-time_request)*1000, response.status, host, url)
            https_manager.save_ssl_connection_for_reuse(response.ssl_sock, host)
            response.close()
            return

        if 'Transfer-Encoding' in response_headers:
            # Re-chunk the body for the client (hex length, CRLF, data).
            length = 0
            while True:
                try:
                    data = response.read(8192)
                except httplib.IncompleteRead, e:
                    # Partial chunk is still forwardable data.
                    data = e.partial
                except Exception as e:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "receive fail")
                    xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                    response.close()
                    return

                if send_to_browser:
                    try:
                        if not data:
                            # Zero-length chunk terminates the stream.
                            wfile.write('0\r\n\r\n')
                            break
                        length += len(data)
                        wfile.write('%x\r\n' % len(data))
                        wfile.write(data)
                        wfile.write('\r\n')
                    except Exception as e:
                        send_to_browser = False
                        xlog.warn("direct_handler.handler send Transfer-Encoding t:%d e:%r %s/%s", time.time()-time_request, e, host, url)
                else:
                    if not data:
                        break
        # NOTE(review): the non-Transfer-Encoding branch appears to be cut
        # off at this chunk boundary; the remainder of this function is
        # outside the visible source.
def request_task(self, task):
    """Send one queued task as an HTTP/1.1 POST to /_gh/ over this
    worker's SSL socket and stream the response body into the task.

    task: work item carrying .headers, .body, .queue, .set_state and
          .put_data (project type; see the dispatcher that enqueues it).

    On request failure the task is re-queued via retry_task_cb and the
    connection is closed; on body-download failure the task receives an
    empty chunk as an EOF marker and the connection is closed.
    """
    task.set_state("h1_req")
    task.headers['Host'] = self.ssl_sock.host

    # Build the raw request head by hand; the body is streamed below.
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in task.headers.items())
    request_data += '\r\n'

    try:
        self.ssl_sock.send(request_data.encode())

        # Send the payload in slices of at most 65535 bytes; send() may
        # accept fewer bytes than offered, so advance by the count it
        # actually wrote.
        payload_len = len(task.body)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = self.ssl_sock.send(task.body[start:start + send_size])
            start += sended

        response = httplib.HTTPResponse(self.ssl_sock, buffering=True)
        self.ssl_sock.settimeout(100)
        response.begin()
    except Exception as e:
        xlog.warn("%s h1_request:%r", self.ip, e)
        google_ip.report_connect_closed(self.ssl_sock.ip, "request_fail")
        # The request never made it out intact: give the task back to the
        # dispatcher and drop this connection.
        self.retry_task_cb(task)
        self.close("request fail")
        return

    task.set_state("h1_get_head")
    body_length = int(response.getheader("Content-Length", "0"))
    task.content_length = body_length
    response.headers = response.msg.dict
    response.worker = self
    response.task = task
    response.ssl_sock = self.ssl_sock
    task.queue.put(response)

    if body_length == 0:
        # No body to stream; the worker is immediately free again.
        self.accept_task = True
        self.processed_tasks += 1
        return

    # read response body,
    try:
        start = 0
        end = body_length
        time_response = last_read_time = time.time()
        while True:
            if start >= end:
                # Whole body delivered: record stats and release the worker.
                self.ssl_sock.received_size += body_length
                time_cost = (time.time() - time_response)
                if time_cost != 0:
                    speed = body_length / time_cost
                    task.set_state("h1_finish[SP:%d]" % speed)
                    self.report_speed(speed, body_length)

                self.accept_task = True
                self.processed_tasks += 1
                return

            # BUGFIX: was max(end - start, 65535), which requested at
            # least 64KB on every read and only behaved because httplib
            # clamps read(amt) to the remaining Content-Length.  min()
            # expresses the intended <=64KB chunking directly.
            to_read = min(end - start, 65535)
            data = response.read(to_read)
            if not data:
                # Empty read: either a stall (keep polling) or a dead
                # connection after 20s of silence.
                if time.time() - last_read_time > 20:
                    xlog.warn("%s read timeout t:%d expect:%d left:%d ",
                              self.ip, (time.time() - time_response) * 1000,
                              body_length, (end - start))
                    break
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            task.put_data(data)
    except Exception as e:
        xlog.warn("%s h1_request:%r", self.ip, e)
        # Empty chunk signals EOF to whoever is consuming task data.
        task.put_data("")
        google_ip.report_connect_closed(self.ssl_sock.ip, "down fail")
        self.close("request body fail")
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """RangeFetch worker loop.

    Pulls (start, end, response) jobs from range_queue, fetches the byte
    range via a GAE appid, and pushes (offset, data) chunks onto
    data_queue.  A failed range is put back as (start, end, None) so
    another worker can retry it.  Runs until self._stopped is set.
    """
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers['Connection'] = 'close'
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Back-pressure: if this range is far ahead of the byte
                # the consumer expects and >30MB is already buffered,
                # defer it and sleep.
                if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers['Range'] = 'bytes=%d-%d' % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                range_queue.put((start, end, None))
                continue

            # app_status is the GAE-level status, distinct from the HTTP
            # status of the proxied resource.
            if response.app_status != 200:
                xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])
                if response.app_status == 404:
                    # Appid is gone; stop entirely if no appid remains.
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                if response.app_status == 503:
                    # Appid over quota; same stop-if-none-left logic.
                    xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader('Location'):
                # Follow the redirect on subsequent fetches and retry
                # this range against the new URL.
                self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                xlog.info('RangeFetch Redirect(%r)', self.url)
                google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader('Content-Range')
                if not content_range:
                    xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s', self.method, self.url, content_range, response.getheaders(), start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader('Content-Length', 0))
                xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                # Stream the range body; `end` is inclusive, hence end+1.
                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            # Empty read: poll briefly, abort after 20s
                            # of silence.
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue
                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                        break

                if start < end + 1:
                    # Incomplete range: requeue the unread remainder.
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
            else:
                xlog.error('RangeFetch %r return %s', self.url, response.status)
                google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            # Unexpected failure in the loop body: log and propagate.
            xlog.exception('RangeFetch._fetchlet error:%s', e)
            raise
def request_task(self, task):
    """Issue one task as an HTTP/1.1 POST to /_gh/ on this worker's SSL
    socket and feed the response body back into the task.

    On a failed request the task is parked on self.task and the socket
    is closed; on a failed download the task is finished early and the
    socket is closed.
    """
    task.set_state("h1_req")
    self.ssl_sock.last_use_time = time.time()
    task.headers['Host'] = self.ssl_sock.host

    # Hand-assemble the request head; the payload follows separately.
    head = 'POST /_gh/ HTTP/1.1\r\n'
    head += ''.join('%s: %s\r\n' % (k, v) for k, v in task.headers.items())
    head += '\r\n'

    try:
        self.ssl_sock.send(head.encode())

        # Push the payload out in <=65535-byte slices, honouring the
        # partial-write count that send() reports.
        body_total = len(task.body)
        sent = 0
        while sent < body_total:
            slice_size = min(body_total - sent, 65535)
            sent += self.ssl_sock.send(task.body[sent:sent + slice_size])

        response = simple_http_client.Response(self.ssl_sock)
        response.begin(timeout=task.timeout)
    except Exception as e:
        xlog.warn("%s h1_request:%r", self.ip, e)
        google_ip.report_connect_closed(self.ssl_sock.ip, "request_fail")
        self.task = task
        self.close("request fail")
        return

    task.set_state("h1_get_head")
    body_length = int(response.getheader("Content-Length", "0"))
    task.content_length = body_length
    task.responsed = True
    response.worker = self
    response.task = task
    response.ssl_sock = self.ssl_sock
    task.queue.put(response)

    # A bodiless response frees the worker right away.
    if body_length == 0:
        self.accept_task = True
        self.processed_tasks += 1
        return

    # Stream the response body.
    try:
        received = 0
        time_response = last_read_time = time.time()
        while received < body_length:
            data = response.read()
            if data:
                last_read_time = time.time()
                received += len(data)
                task.put_data(data)
                continue

            # Empty read: keep polling unless the link has been silent
            # for more than 20 seconds.
            if time.time() - last_read_time > 20:
                xlog.warn("%s read timeout t:%d expect:%d left:%d ",
                          self.ip, (time.time() - time_response) * 1000,
                          body_length, (body_length - received))
                return
            time.sleep(0.1)

        # Full body received: book-keep speed/stats and release the worker.
        self.ssl_sock.received_size += body_length
        time_cost = time.time() - time_response
        if time_cost != 0:
            speed = body_length / time_cost
            task.set_state("h1_finish[SP:%d]" % speed)
            self.report_speed(speed, body_length)
            self.transfered_size += body_length

        task.finish()
        self.task = None
        self.accept_task = True
        self.idle_cb()
        self.processed_tasks += 1
        self.last_active_time = time.time()
        return
    except Exception as e:
        xlog.warn("%s h1_request:%r", self.ip, e)
        task.finish()
        google_ip.report_connect_closed(self.ssl_sock.ip, "down fail")
        self.close("request body fail")
def __fetchlet(self, range_queue, data_queue, range_delay_size):
    """Worker loop for ranged downloads.

    Takes (start, end, response) jobs off range_queue, fetches that byte
    range through a GAE appid, and emits (offset, data) chunks on
    data_queue.  Failed ranges are requeued as (start, end, None) for
    another attempt.  Exits when self._stopped becomes true.
    """
    headers = dict((k.title(), v) for k, v in self.headers.items())
    headers['Connection'] = 'close'
    while not self._stopped:
        try:
            try:
                start, end, response = range_queue.get(timeout=1)
                # Throttle: defer ranges that run ahead of the consumer
                # when more than ~30MB is already buffered.
                if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30*1024*1024:
                    range_queue.put((start, end, response))
                    time.sleep(10)
                    continue
                headers['Range'] = 'bytes=%d-%d' % (start, end)
                if not response:
                    response = fetch(self.method, self.url, headers, self.body)
            except Queue.Empty:
                continue
            except Exception as e:
                xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                range_queue.put((start, end, None))
                continue

            if not response:
                xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                range_queue.put((start, end, None))
                continue

            # app_status reflects the GAE hop, not the target server.
            if response.app_status != 200:
                xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])
                if response.app_status == 404:
                    # Appid no longer exists; stop if none are left.
                    xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                if response.app_status == 503:
                    # Quota exhausted for this appid; stop if none left.
                    xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()
                    if not appid:
                        xlog.error("no appid left")
                        self._stopped = True
                        response.close()
                        return
                google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                response.close()
                range_queue.put((start, end, None))
                continue

            if response.getheader('Location'):
                # Adopt the redirect target and retry this range.
                self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                xlog.info('RangeFetch Redirect(%r)', self.url)
                google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                response.close()
                range_queue.put((start, end, None))
                continue

            if 200 <= response.status < 300:
                content_range = response.getheader('Content-Range')
                if not content_range:
                    xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s', self.method, self.url, content_range, response.getheaders(), start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                content_length = int(response.getheader('Content-Length', 0))
                xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                # `end` is an inclusive offset, hence the end+1 bound.
                time_last_read = time.time()
                while start < end + 1:
                    try:
                        data = response.read(self.bufsize)
                        if not data:
                            # Stall: poll, giving up after 20s of silence.
                            if time.time() - time_last_read > 20:
                                break
                            else:
                                time.sleep(0.1)
                                continue
                        time_last_read = time.time()
                        data_len = len(data)
                        data_queue.put((start, data))
                        start += data_len
                    except Exception as e:
                        xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                        break

                if start < end + 1:
                    # Body ended early: requeue the remaining sub-range.
                    xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
            else:
                xlog.error('RangeFetch %r return %s', self.url, response.status)
                google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                response.close()
                range_queue.put((start, end, None))
                continue
        except StandardError as e:
            # Anything unexpected: log with traceback and re-raise.
            xlog.exception('RangeFetch._fetchlet error:%s', e)
            raise
def request_task(self, task):
    """Send one task as an HTTP/1.1 POST to /_gh/ over this worker's SSL
    socket and stream the response body into the task.

    Instrumented variant: logs elapsed and inactivity times on request
    failure and reports a healthy local network on full success.
    """
    # Reference point for the failure-path timing log below.
    start_time = time.time()
    task.set_state("h1_req")
    self.ssl_sock.last_use_time = time.time()
    task.headers['Host'] = self.ssl_sock.host

    # Hand-built request head; the payload is streamed afterwards.
    request_data = 'POST /_gh/ HTTP/1.1\r\n'
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in task.headers.items())
    request_data += '\r\n'

    try:
        self.ssl_sock.send(request_data.encode())

        # Send the payload in <=65535-byte slices; send() may write
        # fewer bytes than offered, so advance by its return value.
        payload_len = len(task.body)
        start = 0
        while start < payload_len:
            send_size = min(payload_len - start, 65535)
            sended = self.ssl_sock.send(task.body[start:start+send_size])
            start += sended

        response = simple_http_client.Response(self.ssl_sock)
        response.begin(timeout=task.timeout)
    except Exception as e:
        xlog.warn("%s h1_request:%s %r time_cost:%d inactive:%d", self.ip, task.url, e, (time.time()-start_time)*1000, (time.time() - self.last_active_time)*1000)
        google_ip.report_connect_closed(self.ssl_sock.ip, "request_fail")
        # Park the failed task on the worker and drop this connection.
        self.task = task
        self.close("request fail")
        return

    task.set_state("h1_get_head")
    body_length = int(response.getheader("Content-Length", "0"))
    task.content_length = body_length
    task.responsed = True
    response.worker = self
    response.task = task
    response.ssl_sock = self.ssl_sock
    task.queue.put(response)

    if body_length == 0:
        # No body to stream; worker is free again immediately.
        self.accept_task = True
        self.processed_tasks += 1
        return

    # read response body,
    try:
        start = 0
        end = body_length
        time_response = last_read_time = time.time()
        while True:
            if start >= end:
                # Complete body delivered: record stats, finish the task
                # and release the worker.
                self.ssl_sock.received_size += body_length
                time_cost = (time.time() - time_response)
                if time_cost != 0:
                    speed = body_length / time_cost
                    task.set_state("h1_finish[SP:%d]" % speed)
                    self.report_speed(speed, body_length)
                    self.transfered_size += body_length

                task.finish()
                self.task = None
                self.accept_task = True
                self.idle_cb()
                self.processed_tasks += 1
                self.last_active_time = time.time()
                # A full round trip succeeded, so the local network is up.
                check_local_network.report_ok(self.ssl_sock.ip)
                return

            data = response.read()
            # task.set_state("read body:%d" % len(data))
            if not data:
                # Empty read: keep polling unless the link has been
                # silent for more than 20 seconds.
                if time.time() - last_read_time > 20:
                    xlog.warn("%s read timeout t:%d expect:%d left:%d ", self.ip, (time.time()-time_response)*1000, body_length, (end-start))
                    break
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            task.put_data(data)
    except Exception as e:
        xlog.warn("%s h1 get data:%r", self.ip, e)
        task.finish()
        google_ip.report_connect_closed(self.ssl_sock.ip, "down fail")
        self.close("request body fail")