Ejemplo n.º 1
0
def request_gae_proxy(method, url, headers, body):
    """Fetch `url` through GAE, retrying transient failures for up to 60s.

    Returns the response as soon as one arrives with app_status < 300;
    returns False after the 60 second timeout.  Misbehaving IPs and
    appids are reported to google_ip / appid_manager as a side effect
    of each failed attempt before retrying.
    """
    time_request = time.time()

    while True:
        if time.time() - time_request > 60:  # time out
            return False

        try:
            response = fetch_by_gae(method, url, headers, body)
            if response.app_status < 300:
                return response

            xlog.warn("fetch gae status:%s url:%s", response.app_status, url)
            if response.app_status == 506:
                # fetch fail at http request
                continue

            server_type = response.app_headers.get('Server', "")
            if not any(marker in server_type
                       for marker in ("gws", "Google Frontend", "GFE")):
                # Server header does not look like a Google frontend:
                # this IP cannot serve GAE at all, drop it permanently.
                xlog.warn("IP:%s not support GAE, server type:%s",
                          response.ssl_sock.ip, server_type)
                google_ip.report_connect_fail(response.ssl_sock.ip,
                                              force_remove=True)
                response.worker.close("ip not support GAE")
                continue

            if response.app_status == 404:
                # The appid no longer exists; report it and retry with
                # another one.
                appid_manager.report_not_exist(response.ssl_sock.appid,
                                               response.ssl_sock.ip)
                response.worker.close("appid not exist:%s" %
                                      response.ssl_sock.appid)
                continue

            if response.app_status in (403, 405):  # Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ',
                             response.ssl_sock.ip)
                # some ip can connect, and server type is gws,
                # but can't be used as a GAE server,
                # so we need to remove it immediately.
                google_ip.report_connect_fail(response.ssl_sock.ip,
                                              force_remove=True)
                response.worker.close("ip not support GAE")
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it. %s',
                             response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                response.worker.close("appid out of quota")
                continue

        except GAE_Exception as e:
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
Ejemplo n.º 2
0
def request_gae_server(headers, body):
    """Send one packed request to a GAE server over HTTP and vet the reply.

    Processes status codes returned at the HTTP layer and raises
    GAE_Exception on every failure mode so the upper layer can retry.
    Returns the raw response on success.

    Raises:
        GAE_Exception: 600 no/ >=600 response, 602 IP is not a real GAE
            frontend, 603 appid does not exist, 604 appid out of quota,
            605 other HTTP error status.
    """
    response = http_dispatch.request(headers, body)
    if not response:
        raise GAE_Exception(600, "fetch gae fail")

    if response.status >= 600:
        raise GAE_Exception(response.status,
                            "fetch gae fail:%d" % response.status)

    server_type = response.headers.get("server", "")
    if not any(marker in server_type
               for marker in ("gws", "Google Frontend", "GFE")) \
            or response.status in (403, 405):
        # some ip can connect, and server type can be gws,
        # but can't be used as a GAE server,
        # so we need to remove it immediately.
        xlog.warn("IP:%s not support GAE, server:%s status:%d",
                  response.ssl_sock.ip, server_type, response.status)
        google_ip.recheck_ip(response.ssl_sock.ip)
        response.worker.close("ip not support GAE")
        raise GAE_Exception(602, "ip not support GAE")

    if response.status == 404:
        appid_manager.report_not_exist(response.ssl_sock.appid,
                                       response.ssl_sock.ip)
        response.worker.close("appid not exist:%s" % response.ssl_sock.appid)
        raise GAE_Exception(603, "appid not support GAE")

    if response.status == 503:
        appid = response.ssl_sock.appid
        xlog.warning('APPID %r out of Quota, remove it. %s', appid,
                     response.ssl_sock.ip)
        appid_manager.report_out_of_quota(appid)
        response.worker.close("appid out of quota:%s" % appid)
        raise GAE_Exception(604, "appid out of quota:%s" % appid)

    if response.status > 300:
        raise GAE_Exception(605, "status:%d" % response.status)

    if response.status != 200:
        xlog.warn("GAE %s appid:%s status:%d", response.ssl_sock.ip,
                  response.ssl_sock.appid, response.status)
    else:
        xlog.info('good ip num:%d, bad ip num:%s', google_ip.good_ip_num,
                  google_ip.bad_ip_num)

    return response
Ejemplo n.º 3
0
def request_gae_server(headers, body, url, timeout):
    """Send one packed request to a GAE server and vet the HTTP-level reply.

    Processes status codes returned at the HTTP layer and raises
    GAE_Exception on every failure mode so the upper layer can retry.
    Returns the raw response on success.

    Raises:
        GAE_Exception: 600 no/ >=600 response, 602 IP is not a real GAE
            frontend, 603 appid does not exist, 604 appid out of quota,
            605 other HTTP error status.
    """
    response = http_dispatch.request(headers, body, url, timeout)
    if not response:
        raise GAE_Exception(600, "fetch gae fail")

    if response.status >= 600:
        raise GAE_Exception(
            response.status, "fetch gae fail:%d" % response.status)

    server_type = response.getheader("server", "")
    if not any(marker in server_type
               for marker in ("gws", "Google Frontend", "GFE")) \
            or response.status in (403, 405):
        # some ip can connect, and server type can be gws,
        # but can't be used as a GAE server,
        # so we need to remove it immediately.
        xlog.warn("IP:%s not support GAE, headers:%s status:%d", response.ssl_sock.ip, response.headers,
                  response.status)
        google_ip.recheck_ip(response.ssl_sock.ip)
        response.worker.close("ip not support GAE")
        raise GAE_Exception(602, "ip not support GAE")

    if response.status == 404:
        appid_manager.report_not_exist(
            response.ssl_sock.appid, response.ssl_sock.ip)
        response.worker.close("appid not exist:%s" % response.ssl_sock.appid)
        raise GAE_Exception(603, "appid not support GAE")

    if response.status == 503:
        appid = response.ssl_sock.appid
        xlog.warning('APPID %r out of Quota, remove it. %s',
                     appid, response.ssl_sock.ip)
        appid_manager.report_out_of_quota(appid)
        response.worker.close("appid out of quota:%s" % appid)
        raise GAE_Exception(604, "appid out of quota:%s" % appid)

    if response.status > 300:
        raise GAE_Exception(605, "status:%d" % response.status)

    if response.status != 200:
        xlog.warn("GAE %s appid:%s status:%d", response.ssl_sock.ip,
                  response.ssl_sock.appid, response.status)

    return response
Ejemplo n.º 4
0
def request_gae_proxy(method, url, headers, body):
    """Keep fetching `url` through GAE until a usable response arrives.

    Returns the first response whose app_status is below 300, or False
    once 60 seconds have elapsed without success.  Bad IPs and appids
    are reported before each retry.
    """
    deadline = time.time() + 60

    while time.time() <= deadline:
        try:
            response = fetch_by_gae(method, url, headers, body)
            status = response.app_status
            if status < 300:
                return response

            xlog.warn("fetch gae status:%s url:%s", status, url)
            if status == 506:
                # fetch fail at http request
                continue

            server_type = response.app_headers.get('Server', "")
            sock = response.ssl_sock
            if ("gws" not in server_type
                    and "Google Frontend" not in server_type
                    and "GFE" not in server_type):
                xlog.warn("IP:%s not support GAE, server type:%s", sock.ip, server_type)
                google_ip.report_connect_fail(sock.ip, force_remove=True)
                response.worker.close("ip not support GAE")
            elif status == 404:
                # appid is gone; report and try another one
                appid_manager.report_not_exist(sock.appid, sock.ip)
                response.worker.close("appid not exist:%s" % sock.appid)
            elif status == 403 or status == 405:  # Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', sock.ip)
                # ip connects and reports gws, yet cannot serve GAE:
                # remove it immediately
                google_ip.report_connect_fail(sock.ip, force_remove=True)
                response.worker.close("ip not support GAE")
            elif status == 503:
                xlog.warning('APPID %r out of Quota, remove it. %s', sock.appid, sock.ip)
                appid_manager.report_out_of_quota(sock.appid)
                response.worker.close("appid out of quota")

        except GAE_Exception as e:
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    return False  # time out
Ejemplo n.º 5
0
def request_gae_server(headers, body, url, timeout):
    """Dispatch one packed request to GAE and translate HTTP failures.

    Every failure mode raises GAE_Exception so the caller can retry;
    the raw response is returned on success.
    """
    response = http_dispatch.request(headers, body, url, timeout)
    if not response:
        raise GAE_Exception(600, "fetch gae fail")

    status = response.status
    if status >= 600:
        raise GAE_Exception(status, "fetch gae fail:%d" % status)

    server_type = response.getheader("server", "")
    frontend_markers = ("gws", "Google Frontend", "GFE")
    if (not any(m in server_type for m in frontend_markers)
            or status in (403, 405)):
        # The IP answers but is not a usable GAE frontend.
        xlog.warn("IP:%s not support GAE, headers:%s status:%d",
                  response.ssl_sock.ip, response.headers, status)
        google_ip.recheck_ip(response.ssl_sock.ip)
        response.worker.close("ip not support GAE")
        raise GAE_Exception(602, "ip not support GAE")

    if status == 404:
        appid_manager.report_not_exist(response.ssl_sock.appid,
                                       response.ssl_sock.ip)
        response.worker.close("appid not exist:%s" % response.ssl_sock.appid)
        raise GAE_Exception(603, "appid not support GAE")

    if status == 503:
        appid = response.ssl_sock.appid
        xlog.warning('APPID %r out of Quota, remove it. %s', appid,
                     response.ssl_sock.ip)
        appid_manager.report_out_of_quota(appid)
        response.worker.close("appid out of quota:%s" % appid)
        raise GAE_Exception(604, "appid out of quota:%s" % appid)

    if status > 300:
        raise GAE_Exception(605, "status:%d" % status)

    if status == 200:
        xlog.info('good ip num:%d, bad ip num:%s', google_ip.good_ip_num,
                  google_ip.bad_ip_num)
    else:
        xlog.warn("GAE %s appid:%s status:%d", response.ssl_sock.ip,
                  response.ssl_sock.appid, status)

    return response
Ejemplo n.º 6
0
def request_gae_proxy(method, url, headers, body, timeout=60, retry=True):
    """Pack a request, send it through GAE, and retry until `timeout`.

    Retries transient failures until `timeout` seconds have elapsed (or
    immediately gives up after the first failure when retry=False),
    then raises GAE_Exception(600, ...) carrying the accumulated error
    messages.  An app-level 510 (appid reached ~80% of daily quota)
    rotates the appid and retries.
    """
    time_request = time.time()
    request_headers, request_body = pack_request(method, url, headers, body)
    error_msg = []

    while True:
        # error_msg holds str entries, so join with a str separator;
        # b"".join(error_msg) raises TypeError on Python 3.
        if time.time() - time_request > timeout:
            raise GAE_Exception(600, "".join(error_msg))

        if not retry and error_msg:
            raise GAE_Exception(600, "".join(error_msg))

        try:
            response = request_gae_server(request_headers, request_body, url, timeout)

            # A reply got through, so the local network link is fine.
            check_local_network.report_network_ok()

            response = unpack_response(response)

            if response.app_msg:
                xlog.warn("server app return fail, status:%d",
                          response.app_status)

                if response.app_status == 510:
                    # reach 80% of traffic today
                    # disable for get big file.
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    response.worker.close(
                        "appid out of quota:%s" % response.ssl_sock.appid)
                    continue

            return response
        except GAE_Exception as e:
            err_msg = "gae_exception:%r %s" % (e, url)
            error_msg.append(err_msg)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            err_msg = 'gae_handler.handler %r %s , retry...' % (e, url)
            error_msg.append(err_msg)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
Ejemplo n.º 7
0
def request_gae_server(headers, body, url, timeout):
    """Forward one packed request to a GAE server.

    Raises GAE_Exception for every failure so the upper layer retries;
    returns the response untouched on success.
    """
    response = http_dispatch.request(headers, body, url, timeout)
    if not response:
        raise GAE_Exception(600, "fetch gae fail")

    status = response.status
    if status >= 600:
        raise GAE_Exception(status, "fetch gae fail:%d" % status)

    server_type = response.getheader("server", "")
    is_gae_frontend = ("gws" in server_type
                       or "Google Frontend" in server_type
                       or "GFE" in server_type)
    if not is_gae_frontend or status == 403 or status == 405:
        # Reachable IP, but not a usable GAE frontend.
        xlog.warn("IP:%s not support GAE, headers:%s status:%d", response.ssl_sock.ip, response.headers,
                  status)
        google_ip.recheck_ip(response.ssl_sock.ip)
        response.worker.close("ip not support GAE")
        raise GAE_Exception(602, "ip not support GAE")

    if status == 404:
        appid_manager.report_not_exist(
            response.ssl_sock.appid, response.ssl_sock.ip)
        response.worker.close("appid not exist:%s" % response.ssl_sock.appid)
        raise GAE_Exception(603, "appid not support GAE")

    if status == 503:
        appid = response.ssl_sock.appid
        xlog.warning('APPID %r out of Quota, remove it. %s',
                     appid, response.ssl_sock.ip)
        appid_manager.report_out_of_quota(appid)
        response.worker.close("appid out of quota:%s" % appid)
        raise GAE_Exception(604, "appid out of quota:%s" % appid)

    if status > 300:
        raise GAE_Exception(605, "status:%d" % status)

    if status == 200:
        xlog.info('good ip num:%d, bad ip num:%s', google_ip.good_ip_num, google_ip.bad_ip_num)
    else:
        xlog.warn("GAE %s appid:%s status:%d", response.ssl_sock.ip, response.ssl_sock.appid, status)

    return response
Ejemplo n.º 8
0
def request_gae_proxy(method, url, headers, body, timeout=60, retry=True):
    """Pack a request, send it through GAE, and retry until `timeout`.

    Retries transient failures until `timeout` seconds have elapsed (or
    immediately gives up after the first failure when retry=False),
    then raises GAE_Exception(600, ...) carrying the accumulated error
    messages.  An app-level 510 (appid reached ~80% of daily quota)
    rotates the appid and retries.
    """
    time_request = time.time()
    request_headers, request_body = pack_request(method, url, headers, body)
    error_msg = []

    while True:
        # error_msg holds str entries, so join with a str separator;
        # b"".join(error_msg) raises TypeError on Python 3.
        if time.time() - time_request > timeout:
            raise GAE_Exception(600, "".join(error_msg))

        if not retry and error_msg:
            raise GAE_Exception(600, "".join(error_msg))

        try:
            response = request_gae_server(request_headers, request_body, url, timeout)

            response = unpack_response(response)

            if response.app_msg:
                xlog.warn("server app return fail, status:%d",
                          response.app_status)

                if response.app_status == 510:
                    # reach 80% of traffic today
                    # disable for get big file.
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    response.worker.close(
                        "appid out of quota:%s" % response.ssl_sock.appid)
                    continue

            return response
        except GAE_Exception as e:
            err_msg = "gae_exception:%r %s" % (e, url)
            error_msg.append(err_msg)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            err_msg = 'gae_handler.handler %r %s , retry...' % (e, url)
            error_msg.append(err_msg)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
Ejemplo n.º 9
0
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        """Range-download worker loop.

        Pulls (start, end, response) jobs off `range_queue`, fetches the
        byte range via GAE, and pushes (offset, data) chunks onto
        `data_queue` for the writer.  Failed ranges are re-queued as
        (start, end, None) so another worker can retry them.  Runs until
        self._stopped is set.
        """
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    # Throttle: if this range is far ahead of the byte the
                    # writer expects next and >30MB is already buffered,
                    # put the job back and wait for the writer to catch up.
                    if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30*1024*1024:
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers, self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])

                    if response.app_status == 404:
                        # appid is gone; if no appid is left the whole
                        # fetch is aborted.
                        xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    if response.app_status == 503:
                        # quota exhausted for this appid; abort if none left.
                        xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if response.getheader('Location'):
                    # Follow the redirect for all subsequent range requests.
                    self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method, self.url, content_range, response.getheaders(), start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                    # Stream the body; `start` advances past each chunk so
                    # a partial failure re-queues only the remaining bytes.
                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                # Empty read: give up after 20s of silence,
                                # otherwise poll again shortly.
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                            break

                    if start < end + 1:
                        # Body ended early: retry the unread tail.
                        xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url, response.status)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                # NOTE: StandardError is Python 2 only.
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
Ejemplo n.º 10
0
def handler(method, url, headers, body, wfile):
    """Top-level GAE request handler: fetch `url` via GAE and stream the
    response body back to the browser through `wfile`.

    Retries transient GAE failures for up to 30 seconds, rotating bad
    IPs/appids along the way.  A 206 response is delegated to
    RangeFetch; otherwise status line, filtered headers and body are
    written to `wfile` directly.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30: #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                try:
                    # A non-Google Server header means this IP cannot
                    # serve GAE at all; drop it and retry.
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                # appid gone: rotate to another appid, or fail the
                # request if none are left.
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                # appid out of quota: rotate appid or fail if none left.
                xlog.warning('APPID %r out of Quota, remove it. %s', response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                # Usable response (including app_status 200); stop retrying.
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)


    if response.status == 206:
        # Partial content: hand over to the multi-threaded range fetcher.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # For HEAD requests the real length is carried in a side-channel
        # header; surface it as Content-Length, then strip the helper.
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Browser side is gone; keep draining the GAE response below
            # so the connection can still be reused.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time()-time_request, e, url)


        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                # Whole body transferred: log throughput and recycle the
                # SSL connection for reuse.
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)


                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                        length, response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                        length, response.ssl_sock.handshake_time, response.status, url)

                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                # Empty read: give up after 20s of silence, else poll.
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    # NOTE(review): comparing write()'s return value to SSL
                    # error codes looks like a wrapped-socket retry quirk —
                    # confirm against the wfile implementation.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # e_b[0] indexing is Python 2 exception-args style.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
Ejemplo n.º 11
0
def handler(method, url, headers, body, wfile):
    """Proxy a single HTTP request through GAE and relay the reply to wfile.

    Retries fetch() up to max_retry times, rotating appids on GAE-level
    404 (appid removed) and 503 (appid over quota).  On success the
    upstream status line, filtered headers and body are streamed to the
    client; a 206 reply is delegated to RangeFetch.

    method/url/headers/body: the client request to forward.
    wfile: writable client socket file object (may be an SSL wrapper).
    """
    time_request = time.time()

    errors = []
    response = None
    for i in xrange(max_retry):
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%d url:%s", response.app_status, url)

            # GAE wrapper says the appid no longer exists: report it and
            # retry with another appid, or give up if none are left.
            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Appid hit its quota: rotate to the next one if any remain.
            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Anything below 500 is treated as a usable upstream answer.
            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)

    # Every attempt raised: report the failure to the client (passing
    # through a final upstream 5xx body when one exists) and stop.
    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {'Content-Type': 'text/html'}
            content = generate_message_html('502 URLFetch failed', 'Local URLFetch %r failed' % url, '<br>'.join(repr(x) for x in errors))

        if response:
            response.close()

        send_response(wfile, status, headers, content.encode('utf-8'))

        logging.warn("GAE %d %s %s", status, method, url)
        return

    # NOTE(review): if the retry loop is exhausted by non-raising 404/503
    # continues, response may be None/closed here -- verify against fetch().
    if response.status == 206:
        # Partial content: hand off to the multi-threaded range fetcher.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # For HEAD the real length is carried in X-Head-Content-Length;
        # surface it as Content-Length for the client.
        if method == "HEAD":
            if 'X-Head-Content-Length' in response_headers:
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
                del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        # Non-empty app_msg means the GAE wrapper produced an error page
        # instead of the origin body; send it verbatim and stop.
        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        # Work out the expected byte window, preferring Content-Range.
        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length

        time_start = time.time()
        send_to_broswer = True
        while True:
            data = response.read(config.AUTORANGE_BUFSIZE)
            # Give up after 20s with no data.
            # NOTE(review): an empty read inside the 20s window falls
            # through without sleeping (busy loop) -- confirm intended.
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    # A non-blocking SSL write may ask to be retried; retry
                    # the same buffer once.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # NOTE(review): both branches log the same message; the
                    # test only distinguishes expected client disconnects.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    # Keep draining upstream, but stop forwarding to the
                    # broken client socket.
                    send_to_broswer = False

            # All expected bytes received: recycle the SSL connection.
            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time()-time_request)*1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        # e[0] indexing is Python 2 exception-args access.
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Ejemplo n.º 12
0
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        """Range-download worker thread body for RangeFetch.

        Pulls (start, end, response) tasks from range_queue, issues a
        Range request when no response is attached yet, and feeds the
        received chunks to data_queue as (offset, data) pairs.  A failed
        or incomplete range is re-queued as (start, end, None) so another
        attempt can pick it up.  Loops until self._stopped is set.
        """
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    # Flow control: if this range is ahead of the write
                    # position and over 30MB is already buffered/delayed,
                    # put it back and back off.
                    if self.expect_begin < start and data_queue.qsize(
                    ) * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers,
                                         self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet",
                                 e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'],
                                 response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ',
                                 response.app_status, self.method, self.url,
                                 headers['Range'])

                    # Appid gone: report it and stop the whole fetch when
                    # no appid is left.
                    if response.app_status == 404:
                        xlog.warning('APPID %r not exists, remove it.',
                                     response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    # Appid over quota: same stop-if-exhausted handling.
                    if response.app_status == 503:
                        xlog.warning(
                            'APPID %r out of Quota, remove it temporary.',
                            response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(
                            response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    response.close()
                    range_queue.put((start, end, None))
                    continue

                # Follow redirects by updating the shared URL and retrying
                # the same range.
                if response.getheader('Location'):
                    self.url = urlparse.urljoin(self.url,
                                                response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning(
                            'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method, self.url, content_range,
                            response.getheaders(), start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(
                        response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s',
                              threading.currentThread().ident, content_length,
                              content_range)

                    # Stream the range body; a 20s stall with no data
                    # aborts this range so it can be retried.
                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s',
                                         self.method, self.url,
                                         headers['Range'], e)
                            break

                    # Short read: re-queue the remainder for another try.
                    if start < end + 1:
                        xlog.warning('RangeFetch "%s %s" retry %s-%s',
                                     self.method, self.url, start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    # Full range received: recycle the SSL connection.
                    https_manager.save_ssl_connection_for_reuse(
                        response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.',
                              start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url,
                               response.status)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                # StandardError exists only in Python 2.
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
Ejemplo n.º 13
0
def handler(method, url, headers, body, wfile):
    """Proxy one request through GAE and stream the reply to wfile.

    Keeps calling fetch() until a usable response arrives or 30 seconds
    have elapsed, rotating appids on 404/503 and force-removing the IP
    on 403/405.  A 206 upstream reply is delegated to RangeFetch;
    otherwise the status line, filtered headers and body are relayed to
    the client.
    """
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status,
                          url)

            # Appid removed on the GAE side: report it and rotate.
            if response.app_status == 404:
                xlog.warning('APPID %r not exists, remove it.',
                             response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists',
                                                 u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ',
                             response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip,
                                              force_remove=True)
                response.close()
                continue

            # Appid over quota: rotate to another appid if any remain.
            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it.',
                             response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists',
                                                 u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Anything below 500 counts as a usable upstream answer.
            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    # Partial content: hand off to the multi-threaded range fetcher.
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()
    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # For HEAD the real length travels in X-Head-Content-Length;
        # surface it as Content-Length and always drop the custom header.
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers[
                    'X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            # Client went away while we wrote headers; keep draining the
            # upstream body below but stop forwarding it.
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s",
                      time.time() - time_request, e, url)

        # Non-empty app_msg is an error page generated by the GAE wrapper
        # instead of the origin body; send it verbatim and stop.
        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        # Determine the expected byte window, preferring Content-Range.
        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                          content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        last_read_time = time.time()
        while True:
            # All expected bytes received: recycle the SSL connection.
            if start > end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info("GAE t:%d s:%d %d %s",
                          (time.time() - time_request) * 1000, length,
                          response.status, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                # Abort after a 20s read stall; otherwise poll again.
                if time.time() - last_read_time > 20:
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s",
                              (time.time() - time_request) * 1000, length,
                              (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    # A non-blocking SSL write may ask to be retried; retry
                    # the same buffer once.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # NOTE(review): both branches log identically; the test
                    # only distinguishes expected client disconnects.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE,
                                  errno.ECONNRESET
                                  ) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r',
                                  e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r',
                                  e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        # e[0] indexing is Python 2 exception-args access.
        if e[0] in (errno.ECONNABORTED,
                    errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
Ejemplo n.º 14
0
    def do_AGENT(self):
        """Forward one browser request through GAE urlfetch.

        Applies the auto-range policy to the Range header, reads any
        request payload, then retries gae_urlfetch up to
        config.FETCHMAX_LOCAL times while handling appid rotation
        (404/503), server-redeploy hints (501) and range-related
        retries.  Streams the response body back to the browser,
        delegating 206 replies to RangeFetch.
        """
        request_headers = dict((k.title(), v) for k, v in self.headers.items())
        host = request_headers.get('Host', '')
        path = self.parsed_url.path
        range_in_query = 'range=' in self.parsed_url.query
        special_range = (any(x(host) for x in config.AUTORANGE_HOSTS_MATCH) or path.endswith(config.AUTORANGE_ENDSWITH)) and not path.endswith(config.AUTORANGE_NOENDSWITH)
        # Clamp a client-supplied Range to AUTORANGE_MAXSIZE, or force one
        # on hosts/paths configured for auto-range downloads.
        if 'Range' in request_headers:
            m = re.search(r'bytes=(\d+)-', request_headers['Range'])
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (start, start+config.AUTORANGE_MAXSIZE-1)
            logging.info('autorange range=%r match url=%r', request_headers['Range'], self.path)
        elif not range_in_query and special_range:
            logging.info('Found [autorange]endswith match url=%r', self.path)
            m = re.search(r'bytes=(\d+)-', request_headers.get('Range', ''))
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (start, start+config.AUTORANGE_MAXSIZE-1)

        # Read the request body, if any, from the browser socket.
        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload = self.rfile.read(int(request_headers.get('Content-Length', 0)))
            except NetWorkIOError as e:
                logging.error('handle_method_urlfetch read payload failed:%s', e)
                return

        response = None
        errors = []
        headers_sent = False


        for retry in range(config.FETCHMAX_LOCAL):
            try:
                content_length = 0
                kwargs = {}

                # TODO: test validate = 1
                kwargs['validate'] = 0

                time_start = time.time()
                response = gae_urlfetch(self.command, self.path, request_headers, payload, **kwargs)
                time_stop = time.time()
                time_cost = int((time_stop - time_start) * 1000)
                if not response:
                    # No response at all: retry, or report 502 on the
                    # last attempt.
                    if retry >= config.FETCHMAX_LOCAL-1:
                        html = generate_message_html('502 URLFetch failed', 'Local URLFetch %r failed' % self.path, str(errors))
                        self.wfile.write(b'HTTP/1.0 502\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        logging.warning('GET no response %s ', self.path)
                        return
                    else:
                        continue

                # appid not exists, try remove it from appid
                if response.app_status == 404:
                    logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                        self.wfile.write(b'HTTP/1.0 404\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # appid over qouta, switch to next appid
                if response.app_status == 503:
                    logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                        self.wfile.write(b'HTTP/1.0 503\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # 500 is Web server internal err
                if response.app_status == 500 and range_in_query and special_range:
                    logging.warning('500 with range in query') #, need trying another APPID?
                    response.close()
                    continue

                # 501: server side needs a redeploy; tell the user and stop.
                if response.app_status == 501:
                    deploy_url = "http://127.0.0.1:8085/?module=goagent&menu=deploy"
                    message = u'请重新部署服务端: <a href="%s">%s</a>' % (deploy_url, deploy_url)
                    html = generate_message_html('Please deploy your new server', message)
                    self.wfile.write(b'HTTP/1.0 501\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                    logging.warning('501 Please deploy your new server') #, need trying another APPID?
                    response.close()
                    return

                # Last attempt with an unexpected status: pass the upstream
                # reply through unmodified (minus Transfer-Encoding).
                if response.app_status != 200 and retry == config.FETCHMAX_LOCAL-1:
                    logging.warn('GAE %s %s status:%s', self.command, self.path, response.status)
                    self.wfile.write(('HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join('%s: %s\r\n' % (k.title(), v) for k, v in response.getheaders() if k.title() != 'Transfer-Encoding'))))
                    self.wfile.write(response.read())
                    response.close()
                    return

                # first response, has no retry.
                if not headers_sent:
                    data_size = response.getheader('Content-Length', '0')
                    download_speed = int(data_size) * 1000 / time_cost
                    logging.info('"GAE t:%d speed:%d len:%s status:%s %s %s HTTP/1.1"', time_cost, download_speed, response.getheader('Content-Length', '-'), response.status, self.command, self.path)
                    if response.status == 206:
                        # 206 means "Partial Content"
                        rangefetch = RangeFetch(self.wfile, response, self.command, self.path, self.headers, payload, maxsize=config.AUTORANGE_MAXSIZE, bufsize=config.AUTORANGE_BUFSIZE, waitsize=config.AUTORANGE_WAITSIZE, threads=config.AUTORANGE_THREADS)
                        return rangefetch.fetch()
                    # Normalize cookie/attachment headers before relaying.
                    if response.getheader('Set-Cookie'):
                        response.msg['Set-Cookie'] = self.normcookie(response.getheader('Set-Cookie'))
                    if response.getheader('Content-Disposition') and '"' not in response.getheader('Content-Disposition'):
                        response.msg['Content-Disposition'] = self.normattachment(response.getheader('Content-Disposition'))
                    headers_data = 'HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join('%s: %s\r\n' % (k.title(), v) for k, v in response.getheaders() if k.title() != 'Transfer-Encoding'))
                    #logging.debug('headers_data=%s', headers_data)
                    #self.wfile.write(headers_data.encode() if bytes is not str else headers_data)
                    self.wfile.write(headers_data)
                    headers_sent = True
                # Determine the expected byte window, preferring
                # Content-Range when present.
                content_length = int(response.getheader('Content-Length', 0))
                content_range = response.getheader('Content-Range', '')
                accept_ranges = response.getheader('Accept-Ranges', 'none')
                if content_range:
                    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
                else:
                    start, end, length = 0, content_length-1, content_length

                if content_length == 0:
                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    return

                send_to_broswer = True
                while True:
                    data = response.read(8192) #TODO: loop read until timeout or except.
                    if not data:
                        response.close()
                        return
                    data_len = len(data)
                    start += data_len
                    if send_to_broswer:
                        try:
                            ret = self.wfile.write(data)
                            # Non-blocking SSL write may ask to be retried.
                            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                                logging.debug("self.wfile.write ret:%d", ret)
                                ret = self.wfile.write(data)
                        except Exception as e_b:
                            logging.exception('GAEProxyHandler.do_METHOD_AGENT send to browser %r return %r', self.path, e_b)
                            send_to_broswer = False

                    # All expected bytes received: recycle the connection.
                    if start >= end:
                        https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                        return
            except Exception as e:
                logging.exception('GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path, e)
                errors.append(e)
                if response:
                    response.close()
                # NOTE(review): accept_ranges/start/end may be unassigned if
                # the exception fired before the headers were parsed; the
                # range-retry branch below assumes they exist -- confirm.
                if e.args[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET):
                    #logging.debug('GAEProxyHandler.do_METHOD_AGENT return %r', e)
                    pass
                elif e.args[0] == errno.ETIMEDOUT or isinstance(e.args[0], str) and 'timed out' in e.args[0]:
                    if content_length and accept_ranges == 'bytes':
                        # we can retry range fetch here
                        logging.warn('GAEProxyHandler.do_METHOD_AGENT timed out, content_length=%r, url=%r, try again', content_length, self.path)
                        self.headers['Range'] = 'bytes=%d-%d' % (start, end)
                elif isinstance(e, NetWorkIOError) and 'bad write retry' in e.args[-1]:
                    logging.warn('GAEProxyHandler.do_METHOD_AGENT return %r, abort. url=%r ', e, self.path)
                    return
                else:
                    logging.exception('GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path, e) #IOError(9, 'Bad file descriptor'), int(e.args[0])
Ejemplo n.º 15
0
def handler(method, url, headers, body, wfile):
    """Relay one browser request through a GAE fetch server.

    Fetches (method, url, headers, body) via fetch() up to max_retry times,
    rotating appids on relay-level failures, then streams the upstream reply
    back to the browser through *wfile*.  Returns None; all output is written
    to wfile.

    Note: response.app_status is the HTTP status of the GAE relay itself
    (404 = appid gone, 503 = appid quota exhausted), while response.status
    is the status of the fetched target resource.
    """
    time_request = time.time()

    errors = []
    response = None
    # Python 2 codebase: xrange, str==bytes, e[0] exception indexing.
    for i in xrange(max_retry):
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%d url:%s",
                              response.app_status, url)

            # Relay 404: this appid is no longer deployed — retire it and
            # retry with the next appid (or give up if none remain).
            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.',
                                response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists',
                                                 u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Relay 503: the appid exhausted its daily quota.
            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.',
                                response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists',
                                                 u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Anything below 500 is a usable relay reply (including the
            # target's own 4xx errors) — stop retrying.
            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)

    # Every attempt raised: forward the relay's 5xx body if we got one,
    # otherwise synthesize a 502 page listing the collected errors.
    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {'Content-Type': 'text/html'}
            content = generate_message_html(
                '502 URLFetch failed', 'Local URLFetch %r failed' % url,
                '<br>'.join(repr(x) for x in errors))

        if response:
            response.close()

        send_response(wfile, status, headers, content.encode('utf-8'))

        logging.warn("GAE %d %s %s", status, method, url)
        return

    # Partial content: delegate to RangeFetch to pull the remaining ranges.
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:

        wfile.write("HTTP/1.1 %d\r\n" % response.status)
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            # Body is re-streamed as a plain byte stream, so drop chunking.
            if key == 'Transfer-Encoding':
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # The relay smuggles the real length of a HEAD reply in
        # X-Head-Content-Length; restore it as Content-Length.
        if method == "HEAD":
            if 'X-Head-Content-Length' in response_headers:
                response_headers['Content-Length'] = response_headers[
                    'X-Head-Content-Length']
                del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        # A non-empty app_msg is an error page produced by the GAE app
        # itself — send it verbatim and stop.
        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            # e.g. "bytes 0-1023/4096" -> start=0, end=1023, length=4096
            start, end, length = tuple(
                int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                          content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        # NOTE(review): time_start is never reset inside the loop, so an
        # empty read only aborts once 20s have passed since the transfer
        # began; before that an empty read just spins.  Compare the sibling
        # handler that resets the timer on every iteration — confirm which
        # behavior is intended.
        time_start = time.time()
        send_to_broswer = True
        while True:
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s",
                             (time.time() - time_request) * 1000, length,
                             (end - start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    # Non-blocking SSL may ask for a retry of the same write.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d",
                                      ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # Python 2: indexing an exception yields e.args[n].
                    # NOTE(review): both branches log the identical message,
                    # so this if/else is redundant as written.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE,
                                  errno.ECONNRESET
                                  ) or 'bad write retry' in repr(e_b):
                        logging.warn(
                            'gae_handler send to browser return %r %r', e_b,
                            url)
                    else:
                        logging.warn(
                            'gae_handler send to browser return %r %r', e_b,
                            url)
                    # Keep draining the upstream body, but stop forwarding.
                    send_to_broswer = False

            # Full body delivered — recycle the SSL connection for reuse.
            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s",
                             (time.time() - time_request) * 1000, length,
                             response.status, url)
                return

    except NetWorkIOError as e:
        # Browser went away mid-transfer: warn quietly; anything else gets
        # a full traceback.
        if e[0] in (errno.ECONNABORTED,
                    errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Ejemplo n.º 16
0
def handler(method, url, headers, body, wfile):
    """Relay one browser request through a GAE fetch server (deadline variant).

    Unlike the retry-counted variant, this loops until a 90-second wall-clock
    deadline, rotating appids on relay-level 404/503 and blacklisting IPs on
    403/405.  Streams the upstream reply back to the browser via *wfile* and
    returns None.

    Note: response.app_status is the HTTP status of the GAE relay itself,
    while response.status is the status of the fetched target resource.
    """
    time_request = time.time()

    errors = []
    response = None
    while True:
        # Hard wall-clock deadline for the whole request, retries included.
        if time.time() - time_request > 90: #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.warn("fetch gae status:%s url:%s", response.app_status, url)


            # Relay 404: this appid is no longer deployed — retire it and
            # retry with another (or give up if none remain).
            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                logging.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            # Relay 503: the appid exhausted its daily quota.
            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            # Anything below 500 is a usable relay reply — stop retrying.
            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)


    # Partial content: delegate to RangeFetch to pull the remaining ranges.
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        # The relay smuggles the real length of a HEAD reply in
        # X-Head-Content-Length; use it only for HEAD, drop it always.
        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        # A non-empty app_msg is an error page produced by the GAE app
        # itself — send it verbatim and stop.
        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            # e.g. "bytes 0-1023/4096" -> start=0, end=1023, length=4096
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length


        send_to_broswer = True
        while True:
            # Timer is reset every iteration: each individual read gets 20s.
            time_start = time.time()
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    # Non-blocking SSL may ask for a retry of the same write.
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # Python 2: indexing an exception yields e.args[n].
                    # NOTE(review): both branches log the identical message,
                    # so this if/else is redundant as written.
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    # Keep draining the upstream body, but stop forwarding.
                    send_to_broswer = False

            # Full body delivered — recycle the SSL connection for reuse.
            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time()-time_request)*1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        # Browser went away mid-transfer: warn quietly; anything else gets
        # a full traceback.
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Ejemplo n.º 17
0
    def do_AGENT(self):
        """Proxy the current request through GAE urlfetch.

        Rewrites the Range header for "autorange" downloads (large files are
        fetched in AUTORANGE_MAXSIZE slices), calls gae_urlfetch up to
        FETCHMAX_LOCAL times (rotating appids on relay 404/503), then streams
        the reply to the browser, handing 206 replies to RangeFetch.
        """
        request_headers = dict((k.title(), v) for k, v in self.headers.items())
        host = request_headers.get('Host', '')
        path = self.parsed_url.path
        range_in_query = 'range=' in self.parsed_url.query
        # Hosts/extensions configured for sliced ("autorange") downloads.
        special_range = (any(x(host) for x in config.AUTORANGE_HOSTS_MATCH)
                         or path.endswith(config.AUTORANGE_ENDSWITH)
                         ) and not path.endswith(config.AUTORANGE_NOENDSWITH)
        if 'Range' in request_headers:
            # Clamp the browser's open-ended range to one slice.
            m = re.search(r'bytes=(\d+)-', request_headers['Range'])
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (
                start, start + config.AUTORANGE_MAXSIZE - 1)
            logging.info('autorange range=%r match url=%r',
                         request_headers['Range'], self.path)
        elif not range_in_query and special_range:
            # No Range from the browser, but the URL matches the autorange
            # rules — request the first slice ourselves.
            logging.info('Found [autorange]endswith match url=%r', self.path)
            m = re.search(r'bytes=(\d+)-', request_headers.get('Range', ''))
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (
                start, start + config.AUTORANGE_MAXSIZE - 1)

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload = self.rfile.read(
                    int(request_headers.get('Content-Length', 0)))
            except NetWorkIOError as e:
                logging.error('handle_method_urlfetch read payload failed:%s',
                              e)
                return

        response = None
        errors = []
        # Response headers are only sent to the browser once, even across
        # retries of the body fetch.
        headers_sent = False

        for retry in range(config.FETCHMAX_LOCAL):
            try:
                content_length = 0
                kwargs = {}

                # TODO: test validate = 1
                kwargs['validate'] = 0

                time_start = time.time()
                response = gae_urlfetch(self.command, self.path,
                                        request_headers, payload, **kwargs)
                time_stop = time.time()
                time_cost = int((time_stop - time_start) * 1000)
                if not response:
                    # No reply at all: give up with a 502 on the last retry.
                    if retry >= config.FETCHMAX_LOCAL - 1:
                        html = generate_message_html(
                            '502 URLFetch failed',
                            'Local URLFetch %r failed' % self.path,
                            str(errors))
                        self.wfile.write(
                            b'HTTP/1.0 502\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        logging.warning('GET no response %s ', self.path)
                        return
                    else:
                        continue

                # appid not exists, try remove it from appid
                if response.app_status == 404:
                    logging.warning('APPID %r not exists, remove it.',
                                    response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html(
                            '404 No usable Appid Exists',
                            u'没有可用appid了,请配置可用的appid')
                        self.wfile.write(
                            b'HTTP/1.0 404\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # appid over qouta, switch to next appid
                if response.app_status == 503:
                    logging.warning('APPID %r out of Quota, remove it.',
                                    response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html(
                            '503 No usable Appid Exists',
                            u'appid流量不足,请增加appid')
                        self.wfile.write(
                            b'HTTP/1.0 503\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # 500 is Web server internal err
                if response.app_status == 500 and range_in_query and special_range:
                    logging.warning('500 with range in query'
                                    )  #, need trying another APPID?
                    response.close()
                    continue

                # Relay 501: server-side script is outdated — tell the user
                # to redeploy instead of retrying.
                if response.app_status == 501:
                    deploy_url = "http://127.0.0.1:8085/?module=goagent&menu=deploy"
                    message = u'请重新部署服务端: <a href="%s">%s</a>' % (deploy_url,
                                                                  deploy_url)
                    html = generate_message_html(
                        'Please deploy your new server', message)
                    self.wfile.write(
                        b'HTTP/1.0 501\r\nContent-Type: text/html\r\n\r\n' +
                        html.encode('utf-8'))
                    logging.warning('501 Please deploy your new server'
                                    )  #, need trying another APPID?
                    response.close()
                    return

                # Last retry and still a relay error: forward it verbatim.
                if response.app_status != 200 and retry == config.FETCHMAX_LOCAL - 1:
                    logging.warn('GAE %s %s status:%s', self.command,
                                 self.path, response.status)
                    self.wfile.write(
                        ('HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join(
                            '%s: %s\r\n' % (k.title(), v)
                            for k, v in response.getheaders()
                            if k.title() != 'Transfer-Encoding'))))
                    self.wfile.write(response.read())
                    response.close()
                    return

                # first response, has no retry.
                if not headers_sent:
                    data_size = response.getheader('Content-Length', '0')
                    # NOTE(review): time_cost could be 0 ms, which would make
                    # this divide by zero — confirm it cannot happen here.
                    download_speed = int(data_size) * 1000 / time_cost
                    logging.info(
                        '"GAE t:%d speed:%d len:%s status:%s %s %s HTTP/1.1"',
                        time_cost, download_speed,
                        response.getheader('Content-Length', '-'),
                        response.status, self.command, self.path)
                    if response.status == 206:
                        # 206 means "Partial Content"
                        rangefetch = RangeFetch(
                            self.wfile,
                            response,
                            self.command,
                            self.path,
                            self.headers,
                            payload,
                            maxsize=config.AUTORANGE_MAXSIZE,
                            bufsize=config.AUTORANGE_BUFSIZE,
                            waitsize=config.AUTORANGE_WAITSIZE,
                            threads=config.AUTORANGE_THREADS)
                        return rangefetch.fetch()
                    # Normalize cookie/attachment headers before forwarding;
                    # presumably works around client parsing quirks — see the
                    # normcookie/normattachment helpers.
                    if response.getheader('Set-Cookie'):
                        response.msg['Set-Cookie'] = self.normcookie(
                            response.getheader('Set-Cookie'))
                    if response.getheader('Content-Disposition'
                                          ) and '"' not in response.getheader(
                                              'Content-Disposition'):
                        response.msg[
                            'Content-Disposition'] = self.normattachment(
                                response.getheader('Content-Disposition'))
                    headers_data = 'HTTP/1.1 %s\r\n%s\r\n' % (
                        response.status, ''.join(
                            '%s: %s\r\n' % (k.title(), v)
                            for k, v in response.getheaders()
                            if k.title() != 'Transfer-Encoding'))
                    #logging.debug('headers_data=%s', headers_data)
                    #self.wfile.write(headers_data.encode() if bytes is not str else headers_data)
                    self.wfile.write(headers_data)
                    headers_sent = True
                content_length = int(response.getheader('Content-Length', 0))
                content_range = response.getheader('Content-Range', '')
                accept_ranges = response.getheader('Accept-Ranges', 'none')
                if content_range:
                    # e.g. "bytes 0-1023/4096" -> start, end, total length
                    start, end, length = tuple(
                        int(x)
                        for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                           content_range).group(1, 2, 3))
                else:
                    start, end, length = 0, content_length - 1, content_length

                # Empty body: nothing to stream, recycle the connection.
                if content_length == 0:
                    https_manager.save_ssl_connection_for_reuse(
                        response.ssl_sock)
                    return

                send_to_broswer = True
                while True:
                    data = response.read(
                        8192)  #TODO: loop read until timeout or except.
                    if not data:
                        response.close()
                        return
                    data_len = len(data)
                    start += data_len
                    if send_to_broswer:
                        try:
                            ret = self.wfile.write(data)
                            # Non-blocking SSL may ask for a retry of the write.
                            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                                logging.debug("self.wfile.write ret:%d", ret)
                                ret = self.wfile.write(data)
                        except Exception as e_b:
                            logging.exception(
                                'GAEProxyHandler.do_METHOD_AGENT send to browser %r return %r',
                                self.path, e_b)
                            # Keep draining upstream, stop forwarding.
                            send_to_broswer = False

                    # Full slice delivered — recycle the SSL connection.
                    if start >= end:
                        https_manager.save_ssl_connection_for_reuse(
                            response.ssl_sock)
                        return
            except Exception as e:
                logging.exception(
                    'GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path,
                    e)
                errors.append(e)
                if response:
                    response.close()
                # NOTE(review): accept_ranges / start / end are only bound
                # once a response body was reached; an exception earlier in
                # the try body would make the branches below raise NameError
                # on the first iteration — confirm.
                if e.args[0] in (errno.ECONNABORTED, errno.EPIPE,
                                 errno.ECONNRESET):
                    #logging.debug('GAEProxyHandler.do_METHOD_AGENT return %r', e)
                    pass
                elif e.args[0] == errno.ETIMEDOUT or isinstance(
                        e.args[0], str) and 'timed out' in e.args[0]:
                    if content_length and accept_ranges == 'bytes':
                        # we can retry range fetch here
                        logging.warn(
                            'GAEProxyHandler.do_METHOD_AGENT timed out, content_length=%r, url=%r, try again',
                            content_length, self.path)
                        self.headers['Range'] = 'bytes=%d-%d' % (start, end)
                elif isinstance(
                        e, NetWorkIOError) and 'bad write retry' in e.args[-1]:
                    logging.warn(
                        'GAEProxyHandler.do_METHOD_AGENT return %r, abort. url=%r ',
                        e, self.path)
                    return
                else:
                    logging.exception(
                        'GAEProxyHandler.do_METHOD_AGENT %r return %r',
                        self.path,
                        e)  #IOError(9, 'Bad file descriptor'), int(e.args[0])
Ejemplo n.º 18
0
def request_gae_proxy(method, url, headers, body, timeout=60, retry=True):
    """Send one request through the GAE relay, retrying until *timeout*.

    Adjusts the Accept-Encoding header so GAE's urlfetch handles brotli
    correctly (urlfetch will not decode br when gzip is also advertised),
    then keeps calling the relay until it yields a usable response or the
    deadline passes.  Raises GAE_Exception(600, ...) with the accumulated
    error text on timeout, or immediately when retry is disabled.
    """
    headers = dict(headers)
    began = time.time()

    # Did the browser itself advertise brotli support?
    accept_encoding = headers.get("Accept-Encoding", "")
    browser_accepts_br = "br" in accept_encoding
    if browser_accepts_br:
        xlog.debug("accept_br_encoding for %s", url)

    host = headers.get("Host", "") or urlparse.urlparse(url).hostname

    # Rebuild the Accept-Encoding token list: drop gzip for known br sites
    # (so urlfetch decodes br for us) and always advertise br upstream.
    codes = accept_encoding.replace(" ", "").split(",")
    if not browser_accepts_br and "gzip" in accept_encoding and host in config.br_sites:
        codes.remove("gzip")
    if "br" not in codes:
        codes.append("br")

    rebuilt = ",".join(codes)
    if rebuilt:
        headers["Accept-Encoding"] = rebuilt
    else:
        del headers["Accept-Encoding"]

    request_headers, request_body = pack_request(method, url, headers, body)
    failures = []

    while True:
        # Give up on deadline, or after the first failure when retry is off.
        if (time.time() - began > timeout) or (failures and not retry):
            raise GAE_Exception(600, b"".join(failures))

        try:
            raw = request_gae_server(request_headers, request_body, url, timeout)
            response = unpack_response(raw)

            if not browser_accepts_br:
                # urlfetch decoded the br body for us but left the header
                # behind — strip it so the browser doesn't try to decode.
                if response.headers.get("Content-Encoding", "") == "br":
                    del response.headers["Content-Encoding"]
                    if host not in config.br_sites:
                        # Remember this host compresses with brotli.
                        config.br_sites = tuple(config.br_sites) + (host,)
                        xlog.warn("Add %s to br_sites", host)

            if response.app_msg:
                xlog.warn("server app return fail, status:%d",
                          response.app_status)

                if response.app_status == 510:
                    # Appid reached 80% of its daily traffic — retire it
                    # and retry on another connection.
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    response.worker.close(
                        "appid out of quota:%s" % response.ssl_sock.appid)
                    continue

            return response
        except GAE_Exception as e:
            failures.append("gae_exception:%r %s" % (e, url))
            xlog.warn("gae_exception:%r %s", e, url)
            # Server-side 500 is not recoverable by retrying here.
            if e.message == '605:status:500':
                raise e
        except Exception as e:
            failures.append('gae_handler.handler %r %s , retry...' % (e, url))
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)
Ejemplo n.º 19
0
def handler(method, url, headers, body, wfile):
    """Proxy one client request through GAE and stream the reply to the client.

    Calls fetch() in a retry loop (30 second overall deadline), rotating
    appids / server IPs on 404 (appid gone), 405 (IP not a GAE frontend)
    and 503 (appid out of quota).  A response with app_status < 500 is
    relayed to the client socket; 206 partial responses are delegated to
    RangeFetch.

    method, url, headers, body: the client request to forward.
    wfile: writable file object for the client connection; all output
        (success or error page) is delivered by writing to it.
    Returns None.
    """
    time_request = time.time()

    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  # overall deadline across all retries
            html = generate_message_html("504 GoAgent Proxy Time out", u"GoAgent代理处理超时,请查看日志!")
            send_response(wfile, 504, body=html.encode("utf-8"))
            return

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%s url:%s", response.app_status, url)

            if response.app_status == 404:
                # appid no longer exists server-side: drop it and retry with another.
                logging.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html("404 No usable Appid Exists", u"没有可用appid了,请配置可用的appid")
                    send_response(wfile, 404, body=html.encode("utf-8"))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 405:  # Method not allowed
                # The IP answers TLS but is not a usable GAE frontend: remove it for good.
                logging.warning("405 Method not allowed. remove %s ", response.ssl_sock.ip)
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                # appid exhausted its daily quota: rotate to another one.
                logging.warning("APPID %r out of Quota, remove it.", response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html("503 No usable Appid Exists", u"appid流量不足,请增加appid")
                    send_response(wfile, 503, body=html.encode("utf-8"))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                # Usable answer (including 4xx meant for the client): stop retrying.
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception("gae_handler.handler %r %s , retry...", e, url)

    # NOTE(review): the loop above exits via break/return, not by counting
    # retries, so this equality check only fires when the error count happens
    # to match max_retry — presumably the intended "gave up" path; confirm.
    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {"Content-Type": "text/html"}
            content = generate_message_html(
                "502 URLFetch failed", "Local URLFetch %r failed" % url, "<br>".join(repr(x) for x in errors)
            )

        if response:
            response.close()

        # content is unicode when generated locally but a raw byte string when
        # read from the upstream response; encoding bytes would raise
        # UnicodeDecodeError in Python 2, so only encode real text.
        if not isinstance(content, bytes):
            content = content.encode("utf-8")
        send_response(wfile, status, headers, content)

        logging.warn("GAE %d %s %s", status, method, url)
        return

    if response.status == 206:
        # Partial content: hand off to the range-fetch pipeline.
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        # Relay the status line and filtered headers to the client.
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == "Transfer-Encoding":
                # http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if "X-Head-Content-Length" in response_headers:
            # Server-side stashed the real length for HEAD requests.
            if method == "HEAD":
                response_headers["Content-Length"] = response_headers["X-Head-Content-Length"]
            del response_headers["X-Head-Content-Length"]

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            # logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        if len(response.app_msg):
            # The GAE app returned its own error page: forward it verbatim.
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader("Content-Length", 0))
        content_range = response.getheader("Content-Range", "")
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r"bytes (\d+)-(\d+)/(\d+)", content_range).group(1, 2, 3)
            )
        else:
            start, end, length = 0, content_length - 1, content_length

        time_start = time.time()
        send_to_browser = True
        while True:
            # NOTE(review): when read() returns empty before the 20s deadline
            # this loop spins without sleeping — inherited behavior, left as-is.
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn(
                    "read timeout t:%d len:%d left:%d %s",
                    (time.time() - time_request) * 1000,
                    length,
                    (end - start),
                    url,
                )
                return

            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or "bad write retry" in repr(e_b):
                        # Client hung up mid-transfer: expected, log briefly.
                        logging.warn("gae_handler send to browser return %r %r", e_b, url)
                    else:
                        # Unexpected write error: keep the traceback
                        # (mirrors the NetWorkIOError handling below).
                        logging.exception("gae_handler send to browser return %r %r", e_b, url)
                    # Keep draining upstream so the connection can be reused,
                    # but stop writing to the dead client socket.
                    send_to_browser = False

            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time() - time_request) * 1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or "bad write retry" in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)