Example No. 1
        def head_request(ssl_sock):
            if ssl_sock.host == '':
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    logging.error("no appid can use")
                    return False
                host = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = host
            else:
                host = ssl_sock.host

            logging.debug("head request %s", host)

            request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

            response = None
            try:
                ssl_sock.settimeout(2)

                ssl_sock.send(request_data.encode())
                response = httplib.HTTPResponse(ssl_sock, buffering=True)

                response.begin()

                status = response.status
                if status != 200:
                    logging.debug("app head fail status:%d", status)
                    raise Exception("app check fail")
                content = response.read()
                return True
            except Exception as e:
                logging.debug("head request fail:%r", e)
                return False
            finally:
                if response:
                    response.close()
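
For reference, a minimal Python 3 sketch of the same keep-alive probe pattern, assuming an already-connected TLS socket and a placeholder appid (Python 3's http.client drops the buffering keyword used above):

import http.client

def head_probe(tls_sock, appid="example-appid"):  # appid value is a placeholder
    host = appid + ".appspot.com"
    request = "HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n" % host
    response = None
    try:
        tls_sock.settimeout(2)
        tls_sock.sendall(request.encode())
        # Parse the raw response straight off the socket, as the examples above do.
        response = http.client.HTTPResponse(tls_sock, method="HEAD")
        response.begin()
        return response.status == 200
    except (OSError, http.client.HTTPException):
        return False
    finally:
        if response is not None:
            response.close()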
Example No. 2
def request(headers={}, payload=None):
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.create_ssl_connection()
            if not ssl_sock:
                logging.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                continue

            response.ssl_sock = ssl_sock
            return response

        except Exception as e:
            logging.warn('request failed:%s', e)
            if ssl_sock:
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
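
The appid_manager used throughout these examples is not shown here; a minimal, hypothetical sketch of the interface the snippets rely on (method names follow the calls above, while the rotation and cool-down policy are assumptions):

import time

class AppidManager:
    """Hypothetical sketch only; the real XX-Net implementation differs."""

    def __init__(self, appids):
        self.appids = list(appids)   # usable appids, rotated round-robin
        self.benched = {}            # appid -> time it last hit quota (503)

    def get_appid(self):
        now = time.time()
        for appid in list(self.appids):
            if now - self.benched.get(appid, 0) > 3600:  # assumed 1 hour cool-down
                self.appids.remove(appid)
                self.appids.append(appid)                # move to the back of the rotation
                return appid
        return None                  # callers treat a falsy value as "no appid can use"

    def report_not_exist(self, appid, ip=None):
        if appid in self.appids:     # 404: the appid was deleted, drop it for good
            self.appids.remove(appid)

    def report_out_of_quota(self, appid):
        self.benched[appid] = time.time()   # 503: bench it temporarily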
Example No. 3
    def on_ssl_created_cb(self, ssl_sock, check_free_worker=True):
        if not ssl_sock:
            raise Exception("on_ssl_created_cb ssl_sock None")

        appid = appid_manager.get_appid()
        if not appid:
            time.sleep(60)
            ssl_sock.close()
            raise http_common.GAE_Exception(1, "no appid can use")

        ssl_sock.appid = appid
        ssl_sock.host = ssl_sock.appid + ".appspot.com"

        if ssl_sock.h2:
            worker = HTTP2_worker(ssl_sock, self.close_cb, self.retry_task_cb, self._on_worker_idle_cb, self.log_debug_data)
            self.h2_num += 1
        else:
            worker = HTTP1_worker(ssl_sock, self.close_cb, self.retry_task_cb, self._on_worker_idle_cb, self.log_debug_data)
            self.h1_num += 1

        self.workers.append(worker)

        self.wait_a_worker_cv.notify()

        if check_free_worker:
            self.check_free_worker()
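
This callback, like the similar ones in Examples 10 and 11, picks a worker type from the protocol negotiated on the new connection. A bare sketch of that dispatch with stand-in classes (not the project's HTTP1_worker/HTTP2_worker):

class H1Worker:
    def __init__(self, sock):
        self.sock = sock

class H2Worker:
    def __init__(self, sock):
        self.sock = sock

def make_worker(ssl_sock):
    # The connection code is assumed to set .h2 from the ALPN/NPN result.
    if getattr(ssl_sock, "h2", False):
        return H2Worker(ssl_sock)
    return H1Worker(ssl_sock)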
Example No. 4
def test_app_head(ssl_sock, ip):
    appid = appid_manager.get_appid()
    request_data = 'GET / HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n' % appid
    time_start = time.time()
    ssl_sock.send(request_data.encode())
    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    try:
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app check %s status:%d", ip, status)
            raise Exception("app check fail")
        content = response.read()
        if "GoAgent" not in content:
            xlog.debug("app check %s content:%s", ip, content)
            raise Exception("content fail")
    except Exception as e:
        xlog.exception("test_app_head except:%r", e)
        return False
    finally:
        response.close()
    time_stop = time.time()
    time_cost = (time_stop - time_start)*1000
    xlog.debug("app check time:%d", time_cost)
    return True
Example No. 5
def test_gae_ip(ip):
    try:
        ssl_sock = connect_ssl(ip, timeout=max_timeout, openssl_context=openssl_context)
        get_ssl_cert_domain(ssl_sock)

        appid = appid_manager.get_appid()
        check_appid(ssl_sock, appid)

        return ssl_sock
    except Exception as e:
        #xlog.exception("test_gae_ip %s e:%r",ip, e)
        return False
Example No. 6
    def head_request(self, ssl_sock):
        if ssl_sock.host == '':
            ssl_sock.appid = appid_manager.get_appid()
            if not ssl_sock.appid:
                xlog.error("no appid can use")
                return False
            host = ssl_sock.appid + ".appspot.com"
            ssl_sock.host = host
        else:
            host = ssl_sock.host

        # public appid don't keep alive, for quota limit.
        if ssl_sock.appid.startswith(
                "xxnet-") and ssl_sock.appid[7:].isdigit():
            #logging.info("public appid don't keep alive")
            #self.keep_alive = 0
            return False

        #logging.debug("head request %s", host)

        request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

        response = None
        try:
            ssl_sock.settimeout(10)
            ssl_sock.sock.settimeout(10)

            data = request_data.encode()
            ret = ssl_sock.send(data)
            if ret != len(data):
                xlog.warn("head send len:%d %d", ret, len(data))
            response = httplib.HTTPResponse(ssl_sock, buffering=True)

            response.begin()

            status = response.status
            if status != 200:
                xlog.debug("app head fail status:%d", status)
                raise Exception("app check fail")
            return True
        except httplib.BadStatusLine as e:
            inactive_time = time.time() - ssl_sock.last_use_time
            xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip,
                       inactive_time)
            return False
        except Exception as e:
            xlog.debug("head request fail:%r", e)
            return False
        finally:
            if response:
                response.close()
Example No. 7
def test_gae_ip(ip):
    try:
        ssl_sock = connect_ssl(ip,
                               timeout=max_timeout,
                               openssl_context=openssl_context)
        get_ssl_cert_domain(ssl_sock)

        appid = appid_manager.get_appid()
        check_appid(ssl_sock, appid)

        return ssl_sock
    except Exception as e:
        #xlog.exception("test_gae_ip %s e:%r",ip, e)
        return False
Example No. 8
    def head_request(self, ssl_sock):
        if ssl_sock.host == '':
            ssl_sock.appid = appid_manager.get_appid()
            if not ssl_sock.appid:
                xlog.error("no appid can use")
                return False
            host = ssl_sock.appid + ".appspot.com"
            ssl_sock.host = host
        else:
            host = ssl_sock.host

        # public appid don't keep alive, for quota limit.
        if ssl_sock.appid.startswith("xxnet-") and ssl_sock.appid[7:].isdigit():
            #logging.info("public appid don't keep alive")
            #self.keep_alive = 0
            return False

        #logging.debug("head request %s", host)

        request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

        response = None
        try:
            ssl_sock.settimeout(10)
            ssl_sock._sock.settimeout(10)

            data = request_data.encode()
            ret = ssl_sock.send(data)
            if ret != len(data):
                xlog.warn("head send len:%d %d", ret, len(data))
            response = httplib.HTTPResponse(ssl_sock, buffering=True)

            response.begin()

            status = response.status
            if status != 200:
                xlog.debug("app head fail status:%d", status)
                raise Exception("app check fail %r" % status)
            return True
        except httplib.BadStatusLine as e:
            inactive_time = time.time() - ssl_sock.last_use_time
            xlog.debug("%s keep alive fail, time:%d", ssl_sock.ip, inactive_time)
            return False
        except Exception as e:
            xlog.warn("%s head %s request fail:%r", ssl_sock.ip, ssl_sock.appid, e)
            return False
        finally:
            if response:
                response.close()
Example No. 9
def test_gae_ip(ip, appid=None):
    try:
        ssl_sock = connect_ssl(ip, timeout=max_timeout, openssl_context=openssl_context)
        get_ssl_cert_domain(ssl_sock)

        if not appid:
            appid = appid_manager.get_appid()
        if appid.startswith("xxnet-"):
            appid = "xxnet-check"
        if not check_appid(ssl_sock, appid, ip):
            return False

        return ssl_sock
    except Exception as e:
        #xlog.exception("test_gae_ip %s e:%r",ip, e)
        return False
Example No. 10
    def on_ssl_created_cb(self, ssl_sock):
        appid = appid_manager.get_appid()
        if not appid:
            time.sleep(60)
            ssl_sock.close()
            raise GAE_Exception(1, "no appid can use")

        ssl_sock.appid = appid
        ssl_sock.host = ssl_sock.appid + ".appspot.com"

        if ssl_sock.h2:
            worker = HTTP2_worker(ssl_sock, self.close_cb, self.retry_task_cb)
            self.h2_num += 1
        else:
            worker = HTTP1_worker(ssl_sock, self.close_cb, self.retry_task_cb)
            self.h1_num += 1

        self.workers.append(worker)
        return worker
Example No. 11
    def on_ssl_created_cb(self, ssl_sock):
        appid = appid_manager.get_appid()
        if not appid:
            time.sleep(60)
            ssl_sock.close()
            raise GAE_Exception(1, "no appid can use")

        ssl_sock.appid = appid
        ssl_sock.host = ssl_sock.appid + ".appspot.com"

        if ssl_sock.h2:
            worker = HTTP2_worker(ssl_sock, self.close_cb, self.retry_task_cb)
            self.h2_num += 1
        else:
            worker = HTTP1_worker(ssl_sock, self.close_cb, self.retry_task_cb)
            self.h1_num += 1

        self.workers.append(worker)
        return worker
Example No. 12
def test_app_head(ssl_sock, ip):
    appid = appid_manager.get_appid()
    request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s.appspot.com\r\n\r\n' % appid
    time_start = time.time()
    ssl_sock.send(request_data.encode())
    response = httplib.HTTPResponse(ssl_sock, buffering=True)
    try:
        response.begin()
        status = response.status
        if status != 200:
            xlog.debug("app check %s status:%d", ip, status)
            raise Exception("app check fail")

    except Exception as e:
        xlog.exception("test_app_head except:%r", e)
        return False
    finally:
        response.close()
    time_stop = time.time()
    time_cost = (time_stop - time_start) * 1000
    xlog.debug("app check time:%d", time_cost)
    return True
Example No. 13
        def head_request(ssl_sock):
            if ssl_sock.host == '':
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    logging.error("no appid can use")
                    return False
                host = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = host
            else:
                host = ssl_sock.host

            #logging.debug("head request %s", host)

            request_data = 'HEAD /_gh/ HTTP/1.1\r\nHost: %s\r\n\r\n' % host

            response = None
            try:
                ssl_sock.settimeout(2)

                ssl_sock.send(request_data.encode())
                response = httplib.HTTPResponse(ssl_sock, buffering=True)

                response.begin()

                status = response.status
                if status != 200:
                    logging.debug("app head fail status:%d", status)
                    raise Exception("app check fail")
                content = response.read()
                return True
            except httplib.BadStatusLine as e:
                return False
            except Exception as e:
                logging.debug("head request fail:%r", e)
                return False
            finally:
                if response:
                    response.close()
Example No. 14
def request(headers={}, payload=None):
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    time.sleep(60)
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response

        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
Example No. 15
def request(headers={}, payload=None):
    max_retry = 3
    for i in range(max_retry):
        ssl_sock = None
        try:
            ssl_sock = https_manager.get_ssl_connection()
            if not ssl_sock:
                xlog.debug('create_ssl_connection fail')
                continue

            if ssl_sock.host == '':
                ssl_sock.appid = appid_manager.get_appid()
                if not ssl_sock.appid:
                    google_ip.report_connect_closed(ssl_sock.ip, "no appid")
                    time.sleep(60)
                    raise GAE_Exception(1, "no appid can use")
                headers['Host'] = ssl_sock.appid + ".appspot.com"
                ssl_sock.host = headers['Host']
            else:
                headers['Host'] = ssl_sock.host

            response = _request(ssl_sock, headers, payload)
            if not response:
                google_ip.report_connect_closed(ssl_sock.ip, "request_fail")
                ssl_sock.close()
                continue

            response.ssl_sock = ssl_sock
            return response

        except Exception as e:
            xlog.exception('request failed:%s', e)
            if ssl_sock:
                google_ip.report_connect_closed(ssl_sock.ip, "request_except")
                ssl_sock.close()
    raise GAE_Exception(2, "try max times")
Example No. 16
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    if self.expect_begin < start and data_queue.qsize() * self.bufsize + range_delay_size > 30*1024*1024:
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers, self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet", e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'], response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ', response.app_status, self.method, self.url, headers['Range'])

                    if response.app_status == 404:
                        xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    if response.app_status == 503:
                        xlog.warning('APPID %r out of Quota, remove it temporary.', response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if response.getheader('Location'):
                    self.url = urlparse.urljoin(self.url, response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "reLocation")
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning('RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method, self.url, content_range, response.getheaders(), start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "no range")
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s', threading.currentThread().ident, content_length, content_range)

                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s', self.method, self.url, headers['Range'], e)
                            break

                    if start < end + 1:
                        xlog.warning('RangeFetch "%s %s" retry %s-%s', self.method, self.url, start, end)
                        google_ip.report_connect_closed(response.ssl_sock.ip, "down err")
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.', start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url, response.status)
                    google_ip.report_connect_closed(response.ssl_sock.ip, "status err")
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
Example No. 17
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30: #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status, url)

                try:
                    server_type = response.getheader('Server', "")
                    if "gws" not in server_type and "Google Frontend" not in server_type and "GFE" not in server_type:
                        xlog.warn("IP:%s not support GAE, server type:%s", response.ssl_sock.ip, server_type)
                        google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                        response.close()
                        continue
                except Exception as e:
                    errors.append(e)
                    xlog.warn('gae_handler.handler %r %s , retry...', e, url)
                    continue

            if response.app_status == 404:
                #xlog.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid, response.ssl_sock.ip)
                google_ip.report_connect_closed(response.ssl_sock.ip, "appid not exist")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405: #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it. %s', response.ssl_sock.appid, response.ssl_sock.ip)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                google_ip.report_connect_closed(response.ssl_sock.ip, "out of quota")
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s", time.time()-time_request, e, url)

        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            google_ip.report_connect_closed(response.ssl_sock.ip, "app err")
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length
        body_length = end - start + 1

        last_read_time = time.time()
        time_response = time.time()
        while True:
            if start > end:
                time_finished = time.time()
                if body_length > 1024 and time_finished - time_response > 0:
                    speed = body_length / (time_finished - time_response)

                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d Spd:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                        length, response.ssl_sock.handshake_time, int(speed), response.status, url)
                else:
                    xlog.info("GAE %d|%s|%d t:%d s:%d hs:%d %d %s",
                        response.ssl_sock.fd, response.ssl_sock.ip, response.ssl_sock.received_size, (time_finished-time_request)*1000,
                        length, response.ssl_sock.handshake_time, response.status, url)

                response.ssl_sock.received_size += body_length
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock, call_time=time_request)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    google_ip.report_connect_closed(response.ssl_sock.ip, "down fail")
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        xlog.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
            google_ip.report_connect_closed(response.ssl_sock.ip, "Net")
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
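
Several of the handlers above parse a Content-Range header of the form bytes <start>-<end>/<total>; the same parsing, isolated into a small helper with hypothetical test values:

import re

def parse_content_range(content_range, content_length=0):
    """Return (start, end, length), falling back to a whole-body range."""
    if content_range:
        m = re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range)
        return tuple(int(x) for x in m.group(1, 2, 3))
    return 0, content_length - 1, content_length

# parse_content_range('bytes 0-1023/4096') -> (0, 1023, 4096)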
Example No. 18
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    errors = []
    response = None
    for i in xrange(max_retry):
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%d url:%s", response.app_status, url)

            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)

    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {'Content-Type': 'text/html'}
            content = generate_message_html('502 URLFetch failed', 'Local URLFetch %r failed' % url, '<br>'.join(repr(x) for x in errors))

        if response:
            response.close()

        send_response(wfile, status, headers, content.encode('utf-8'))

        logging.warn("GAE %d %s %s", status, method, url)
        return

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if method == "HEAD":
            if 'X-Head-Content-Length' in response_headers:
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
                del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length

        time_start = time.time()
        send_to_browser = True
        while True:
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    if e_b[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET) or 'bad write retry' in repr(e_b):
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    else:
                        logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_browser = False

            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time()-time_request)*1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Example No. 19
    def do_AGENT(self):
        """GAE http urlfetch"""
        request_headers = dict((k.title(), v) for k, v in self.headers.items())
        host = request_headers.get('Host', '')
        path = self.parsed_url.path
        range_in_query = 'range=' in self.parsed_url.query
        special_range = (any(x(host) for x in config.AUTORANGE_HOSTS_MATCH) or path.endswith(config.AUTORANGE_ENDSWITH)) and not path.endswith(config.AUTORANGE_NOENDSWITH)
        if 'Range' in request_headers:
            m = re.search(r'bytes=(\d+)-', request_headers['Range'])
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (start, start+config.AUTORANGE_MAXSIZE-1)
            logging.info('autorange range=%r match url=%r', request_headers['Range'], self.path)
        elif not range_in_query and special_range:
            logging.info('Found [autorange]endswith match url=%r', self.path)
            m = re.search(r'bytes=(\d+)-', request_headers.get('Range', ''))
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (start, start+config.AUTORANGE_MAXSIZE-1)

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload = self.rfile.read(int(request_headers.get('Content-Length', 0)))
            except NetWorkIOError as e:
                logging.error('handle_method_urlfetch read payload failed:%s', e)
                return

        response = None
        errors = []
        headers_sent = False

        for retry in range(config.FETCHMAX_LOCAL):
            try:
                content_length = 0
                kwargs = {}

                # TODO: test validate = 1
                kwargs['validate'] = 0

                time_start = time.time()
                response = gae_urlfetch(self.command, self.path, request_headers, payload, **kwargs)
                time_stop = time.time()
                time_cost = int((time_stop - time_start) * 1000)
                if not response:
                    if retry >= config.FETCHMAX_LOCAL-1:
                        html = generate_message_html('502 URLFetch failed', 'Local URLFetch %r failed' % self.path, str(errors))
                        self.wfile.write(b'HTTP/1.0 502\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        logging.warning('GET no response %s ', self.path)
                        return
                    else:
                        continue

                # appid not exists, try remove it from appid
                if response.app_status == 404:
                    logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                        self.wfile.write(b'HTTP/1.0 404\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # appid over qouta, switch to next appid
                if response.app_status == 503:
                    logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                        self.wfile.write(b'HTTP/1.0 503\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # 500 is Web server internal err
                if response.app_status == 500 and range_in_query and special_range:
                    logging.warning('500 with range in query') #, need trying another APPID?
                    response.close()
                    continue

                if response.app_status == 501:
                    deploy_url = "http://127.0.0.1:8085/?module=goagent&menu=deploy"
                    message = u'请重新部署服务端: <a href="%s">%s</a>' % (deploy_url, deploy_url)
                    html = generate_message_html('Please deploy your new server', message)
                    self.wfile.write(b'HTTP/1.0 501\r\nContent-Type: text/html\r\n\r\n' + html.encode('utf-8'))
                    logging.warning('501 Please deploy your new server') #, need trying another APPID?
                    response.close()
                    return

                if response.app_status != 200 and retry == config.FETCHMAX_LOCAL-1:
                    logging.warn('GAE %s %s status:%s', self.command, self.path, response.status)
                    self.wfile.write(('HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join('%s: %s\r\n' % (k.title(), v) for k, v in response.getheaders() if k.title() != 'Transfer-Encoding'))))
                    self.wfile.write(response.read())
                    response.close()
                    return

                # first response, has no retry.
                if not headers_sent:
                    data_size = response.getheader('Content-Length', '0')
                    download_speed = int(data_size) * 1000 / time_cost
                    logging.info('"GAE t:%d speed:%d len:%s status:%s %s %s HTTP/1.1"', time_cost, download_speed, response.getheader('Content-Length', '-'), response.status, self.command, self.path)
                    if response.status == 206:
                        # 206 means "Partial Content"
                        rangefetch = RangeFetch(self.wfile, response, self.command, self.path, self.headers, payload, maxsize=config.AUTORANGE_MAXSIZE, bufsize=config.AUTORANGE_BUFSIZE, waitsize=config.AUTORANGE_WAITSIZE, threads=config.AUTORANGE_THREADS)
                        return rangefetch.fetch()
                    if response.getheader('Set-Cookie'):
                        response.msg['Set-Cookie'] = self.normcookie(response.getheader('Set-Cookie'))
                    if response.getheader('Content-Disposition') and '"' not in response.getheader('Content-Disposition'):
                        response.msg['Content-Disposition'] = self.normattachment(response.getheader('Content-Disposition'))
                    headers_data = 'HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join('%s: %s\r\n' % (k.title(), v) for k, v in response.getheaders() if k.title() != 'Transfer-Encoding'))
                    #logging.debug('headers_data=%s', headers_data)
                    #self.wfile.write(headers_data.encode() if bytes is not str else headers_data)
                    self.wfile.write(headers_data)
                    headers_sent = True
                content_length = int(response.getheader('Content-Length', 0))
                content_range = response.getheader('Content-Range', '')
                accept_ranges = response.getheader('Accept-Ranges', 'none')
                if content_range:
                    start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
                else:
                    start, end, length = 0, content_length-1, content_length

                if content_length == 0:
                    https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                    return

                send_to_browser = True
                while True:
                    data = response.read(8192) #TODO: loop read until timeout or except.
                    if not data:
                        response.close()
                        return
                    data_len = len(data)
                    start += data_len
                    if send_to_browser:
                        try:
                            ret = self.wfile.write(data)
                            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                                logging.debug("self.wfile.write ret:%d", ret)
                                ret = self.wfile.write(data)
                        except Exception as e_b:
                            logging.exception('GAEProxyHandler.do_METHOD_AGENT send to browser %r return %r', self.path, e_b)
                            send_to_browser = False

                    if start >= end:
                        https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                        return
            except Exception as e:
                logging.exception('GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path, e)
                errors.append(e)
                if response:
                    response.close()
                if e.args[0] in (errno.ECONNABORTED, errno.EPIPE, errno.ECONNRESET):
                    #logging.debug('GAEProxyHandler.do_METHOD_AGENT return %r', e)
                    pass
                elif e.args[0] == errno.ETIMEDOUT or isinstance(e.args[0], str) and 'timed out' in e.args[0]:
                    if content_length and accept_ranges == 'bytes':
                        # we can retry range fetch here
                        logging.warn('GAEProxyHandler.do_METHOD_AGENT timed out, content_length=%r, url=%r, try again', content_length, self.path)
                        self.headers['Range'] = 'bytes=%d-%d' % (start, end)
                elif isinstance(e, NetWorkIOError) and 'bad write retry' in e.args[-1]:
                    logging.warn('GAEProxyHandler.do_METHOD_AGENT return %r, abort. url=%r ', e, self.path)
                    return
                else:
                    logging.exception('GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path, e) #IOError(9, 'Bad file descriptor'), int(e.args[0])
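
The autorange logic in do_AGENT rewrites any incoming Range header to a bounded window; just that rewrite, with an assumed window size in place of config.AUTORANGE_MAXSIZE:

import re

AUTORANGE_MAXSIZE = 1024 * 1024   # assumed value; the real one comes from config

def rewrite_range(range_header):
    """Clamp a 'bytes=N-' style Range header to a fixed-size window."""
    m = re.search(r'bytes=(\d+)-', range_header or '')
    start = int(m.group(1)) if m else 0
    return 'bytes=%d-%d' % (start, start + AUTORANGE_MAXSIZE - 1)

# rewrite_range('bytes=2048-') -> 'bytes=2048-1050623'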
Example No. 20
    def __fetchlet(self, range_queue, data_queue, range_delay_size):
        headers = dict((k.title(), v) for k, v in self.headers.items())
        headers['Connection'] = 'close'
        while not self._stopped:
            try:
                try:
                    start, end, response = range_queue.get(timeout=1)
                    if self.expect_begin < start and data_queue.qsize(
                    ) * self.bufsize + range_delay_size > 30 * 1024 * 1024:
                        range_queue.put((start, end, response))
                        time.sleep(10)
                        continue
                    headers['Range'] = 'bytes=%d-%d' % (start, end)
                    if not response:
                        response = fetch(self.method, self.url, headers,
                                         self.body)
                except Queue.Empty:
                    continue
                except Exception as e:
                    xlog.warning("RangeFetch fetch response %r in __fetchlet",
                                 e)
                    range_queue.put((start, end, None))
                    continue

                if not response:
                    xlog.warning('RangeFetch %s return %r', headers['Range'],
                                 response)
                    range_queue.put((start, end, None))
                    continue
                if response.app_status != 200:
                    xlog.warning('Range Fetch return %s "%s %s" %s ',
                                 response.app_status, self.method, self.url,
                                 headers['Range'])

                    if response.app_status == 404:
                        xlog.warning('APPID %r not exists, remove it.',
                                     response.ssl_sock.appid)
                        appid_manager.report_not_exist(response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    if response.app_status == 503:
                        xlog.warning(
                            'APPID %r out of Quota, remove it temporary.',
                            response.ssl_sock.appid)
                        appid_manager.report_out_of_quota(
                            response.ssl_sock.appid)
                        appid = appid_manager.get_appid()
                        if not appid:
                            xlog.error("no appid left")
                            self._stopped = True
                            response.close()
                            return

                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if response.getheader('Location'):
                    self.url = urlparse.urljoin(self.url,
                                                response.getheader('Location'))
                    xlog.info('RangeFetch Redirect(%r)', self.url)
                    response.close()
                    range_queue.put((start, end, None))
                    continue

                if 200 <= response.status < 300:
                    content_range = response.getheader('Content-Range')
                    if not content_range:
                        xlog.warning(
                            'RangeFetch "%s %s" return Content-Range=%r: response headers=%r, retry %s-%s',
                            self.method, self.url, content_range,
                            response.getheaders(), start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue
                    content_length = int(
                        response.getheader('Content-Length', 0))
                    xlog.info('>>>>>>>>>>>>>>> [thread %s] %s %s',
                              threading.currentThread().ident, content_length,
                              content_range)

                    time_last_read = time.time()
                    while start < end + 1:
                        try:
                            data = response.read(self.bufsize)
                            if not data:
                                if time.time() - time_last_read > 20:
                                    break
                                else:
                                    time.sleep(0.1)
                                    continue

                            time_last_read = time.time()
                            data_len = len(data)
                            data_queue.put((start, data))
                            start += data_len

                        except Exception as e:
                            xlog.warning('RangeFetch "%s %s" %s failed: %s',
                                         self.method, self.url,
                                         headers['Range'], e)
                            break

                    if start < end + 1:
                        xlog.warning('RangeFetch "%s %s" retry %s-%s',
                                     self.method, self.url, start, end)
                        response.close()
                        range_queue.put((start, end, None))
                        continue

                    https_manager.save_ssl_connection_for_reuse(
                        response.ssl_sock)
                    xlog.info('>>>>>>>>>>>>>>> Successfully reached %d bytes.',
                              start - 1)
                else:
                    xlog.error('RangeFetch %r return %s', self.url,
                               response.status)
                    response.close()
                    range_queue.put((start, end, None))
                    continue
            except StandardError as e:
                xlog.exception('RangeFetch._fetchlet error:%s', e)
                raise
Example No. 21
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    headers = clean_empty_header(headers)
    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  #time out
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                xlog.warn("fetch gae status:%s url:%s", response.app_status,
                          url)

            if response.app_status == 404:
                xlog.warning('APPID %r not exists, remove it.',
                             response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists',
                                                 u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  #Method not allowed
                # google have changed from gws to gvs, need to remove.
                xlog.warning('405 Method not allowed. remove %s ',
                             response.ssl_sock.ip)
                # some ip can connect, and server type is gws
                # but can't use as GAE server
                # so we need remove it immediately
                google_ip.report_connect_fail(response.ssl_sock.ip,
                                              force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                xlog.warning('APPID %r out of Quota, remove it.',
                             response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists',
                                                 u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            xlog.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            xlog.exception('gae_handler.handler %r %s , retry...', e, url)

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()
    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers[
                    'X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        send_to_browser = True
        try:
            for key in response_headers:
                value = response_headers[key]
                send_header(wfile, key, value)
                #logging.debug("Head- %s: %s", key, value)
            wfile.write("\r\n")
        except Exception as e:
            send_to_browser = False
            xlog.warn("gae_handler.handler send response fail. t:%d e:%r %s",
                      time.time() - time_request, e, url)

        if len(response.app_msg):
            xlog.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                          content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        last_read_time = time.time()
        while True:
            if start > end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                xlog.info("GAE t:%d s:%d %d %s",
                          (time.time() - time_request) * 1000, length,
                          response.status, url)
                return

            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data:
                if time.time() - last_read_time > 20:
                    response.close()
                    xlog.warn("read timeout t:%d len:%d left:%d %s",
                              (time.time() - time_request) * 1000, length,
                              (end - start), url)
                    return
                else:
                    time.sleep(0.1)
                    continue

            last_read_time = time.time()
            data_len = len(data)
            start += data_len
            if send_to_browser:
                try:
                    ret = wfile.write(data)
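                    # A write on a non-blocking SSL socket may signal WANT_WRITE/WANT_READ
                    # instead of sending; retry the same chunk once in that case.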
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        xlog.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # ECONNABORTED / EPIPE / ECONNRESET and 'bad write retry' all mean the
                    # browser side is gone; stop forwarding but keep draining the response.
                    xlog.warn('gae_handler send to browser return %r %r',
                              e_b, url)
                    send_to_browser = False

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED,
                    errno.EPIPE) or 'bad write retry' in repr(e):
            xlog.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            xlog.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        xlog.exception("gae_handler except:%r %s", e, url)
Exemplo n.º 22
0
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    errors = []
    response = None
    while True:
        if time.time() - time_request > 30:  # overall deadline for all retries
            html = generate_message_html("504 GoAgent Proxy Time out", u"GoAgent代理处理超时,请查看日志!")
            send_response(wfile, 504, body=html.encode("utf-8"))
            return

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%s url:%s", response.app_status, url)

            if response.app_status == 404:
                logging.warning("APPID %r not exists, remove it.", response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html("404 No usable Appid Exists", u"没有可用appid了,请配置可用的appid")
                    send_response(wfile, 404, body=html.encode("utf-8"))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 405:  # Method not allowed
                logging.warning("405 Method not allowed. remove %s ", response.ssl_sock.ip)
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                logging.warning("APPID %r out of Quota, remove it.", response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html("503 No usable Appid Exists", u"appid流量不足,请增加appid")
                    send_response(wfile, 503, body=html.encode("utf-8"))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception("gae_handler.handler %r %s , retry...", e, url)

    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {"Content-Type": "text/html"}
            content = generate_message_html(
                "502 URLFetch failed", "Local URLFetch %r failed" % url, "<br>".join(repr(x) for x in errors)
            )

        if response:
            response.close()

        send_response(wfile, status, headers, content.encode("utf-8"))

        logging.warn("GAE %d %s %s", status, method, url)
        return

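    # 206 Partial Content: hand the body off to RangeFetch, which fetches the remaining byte ranges.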
    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == "Transfer-Encoding":
                # http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if "X-Head-Content-Length" in response_headers:
            if method == "HEAD":
                response_headers["Content-Length"] = response_headers["X-Head-Content-Length"]
            del response_headers["X-Head-Content-Length"]

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            # logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader("Content-Length", 0))
        content_range = response.getheader("Content-Range", "")
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r"bytes (\d+)-(\d+)/(\d+)", content_range).group(1, 2, 3)
            )
        else:
            start, end, length = 0, content_length - 1, content_length

        time_start = time.time()
        send_to_broswer = True
        while True:
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn(
                    "read timeout t:%d len:%d left:%d %s",
                    (time.time() - time_request) * 1000,
                    length,
                    (end - start),
                    url,
                )
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # ECONNABORTED / EPIPE / ECONNRESET and "bad write retry" are all handled
                    # the same way: log and stop forwarding to the browser.
                    logging.warn("gae_handler send to browser return %r %r", e_b, url)
                    send_to_broswer = False

            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time() - time_request) * 1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or "bad write retry" in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Exemplo n.º 23
0
    def do_AGENT(self):
        """GAE http urlfetch"""
        request_headers = dict((k.title(), v) for k, v in self.headers.items())
        host = request_headers.get('Host', '')
        path = self.parsed_url.path
        range_in_query = 'range=' in self.parsed_url.query
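        # A URL qualifies for autorange when its host or extension matches the
        # AUTORANGE rules and is not explicitly excluded.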
        special_range = (any(x(host) for x in config.AUTORANGE_HOSTS_MATCH)
                         or path.endswith(config.AUTORANGE_ENDSWITH)
                         ) and not path.endswith(config.AUTORANGE_NOENDSWITH)
        if 'Range' in request_headers:
            m = re.search(r'bytes=(\d+)-', request_headers['Range'])
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (
                start, start + config.AUTORANGE_MAXSIZE - 1)
            logging.info('autorange range=%r match url=%r',
                         request_headers['Range'], self.path)
        elif not range_in_query and special_range:
            logging.info('Found [autorange]endswith match url=%r', self.path)
            m = re.search(r'bytes=(\d+)-', request_headers.get('Range', ''))
            start = int(m.group(1) if m else 0)
            request_headers['Range'] = 'bytes=%d-%d' % (
                start, start + config.AUTORANGE_MAXSIZE - 1)

        payload = b''
        if 'Content-Length' in request_headers:
            try:
                payload = self.rfile.read(
                    int(request_headers.get('Content-Length', 0)))
            except NetWorkIOError as e:
                logging.error('handle_method_urlfetch read payload failed:%s',
                              e)
                return

        response = None
        errors = []
        headers_sent = False

        for retry in range(config.FETCHMAX_LOCAL):
            try:
                content_length = 0
                kwargs = {}

                # TODO: test validate = 1
                kwargs['validate'] = 0

                time_start = time.time()
                response = gae_urlfetch(self.command, self.path,
                                        request_headers, payload, **kwargs)
                time_stop = time.time()
                time_cost = int((time_stop - time_start) * 1000)
                if not response:
                    if retry >= config.FETCHMAX_LOCAL - 1:
                        html = generate_message_html(
                            '502 URLFetch failed',
                            'Local URLFetch %r failed' % self.path,
                            str(errors))
                        self.wfile.write(
                            b'HTTP/1.0 502\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        logging.warning('GET no response %s ', self.path)
                        return
                    else:
                        continue

                # appid does not exist; remove it from the appid pool and try the next one
                if response.app_status == 404:
                    logging.warning('APPID %r not exists, remove it.',
                                    response.ssl_sock.appid)
                    appid_manager.report_not_exist(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html(
                            '404 No usable Appid Exists',
                            u'没有可用appid了,请配置可用的appid')
                        self.wfile.write(
                            b'HTTP/1.0 404\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # appid over quota, switch to the next appid
                if response.app_status == 503:
                    logging.warning('APPID %r out of Quota, remove it.',
                                    response.ssl_sock.appid)
                    appid_manager.report_out_of_quota(response.ssl_sock.appid)
                    appid = appid_manager.get_appid()

                    if not appid:
                        html = generate_message_html(
                            '503 No usable Appid Exists',
                            u'appid流量不足,请增加appid')
                        self.wfile.write(
                            b'HTTP/1.0 503\r\nContent-Type: text/html\r\n\r\n'
                            + html.encode('utf-8'))
                        response.close()
                        return
                    else:
                        continue

                # 500 is an internal error on the web server
                if response.app_status == 500 and range_in_query and special_range:
                    logging.warning('500 with range in query')  # TODO: try another APPID?
                    response.close()
                    continue

                if response.app_status == 501:
                    deploy_url = "http://127.0.0.1:8085/?module=goagent&menu=deploy"
                    message = u'请重新部署服务端: <a href="%s">%s</a>' % (deploy_url,
                                                                  deploy_url)
                    html = generate_message_html(
                        'Please deploy your new server', message)
                    self.wfile.write(
                        b'HTTP/1.0 501\r\nContent-Type: text/html\r\n\r\n' +
                        html.encode('utf-8'))
                    logging.warning('501 Please deploy your new server')  # TODO: try another APPID?
                    response.close()
                    return

                if response.app_status != 200 and retry == config.FETCHMAX_LOCAL - 1:
                    logging.warn('GAE %s %s status:%s', self.command,
                                 self.path, response.status)
                    self.wfile.write(
                        ('HTTP/1.1 %s\r\n%s\r\n' % (response.status, ''.join(
                            '%s: %s\r\n' % (k.title(), v)
                            for k, v in response.getheaders()
                            if k.title() != 'Transfer-Encoding'))))
                    self.wfile.write(response.read())
                    response.close()
                    return

                # first response: headers have not been sent to the browser yet, so no retry has happened.
                if not headers_sent:
                    data_size = response.getheader('Content-Length', '0')
                    download_speed = int(data_size) * 1000 / max(time_cost, 1)  # guard against ZeroDivisionError on sub-millisecond responses
                    logging.info(
                        '"GAE t:%d speed:%d len:%s status:%s %s %s HTTP/1.1"',
                        time_cost, download_speed,
                        response.getheader('Content-Length', '-'),
                        response.status, self.command, self.path)
                    if response.status == 206:
                        # 206 means "Partial Content"
                        rangefetch = RangeFetch(
                            self.wfile,
                            response,
                            self.command,
                            self.path,
                            self.headers,
                            payload,
                            maxsize=config.AUTORANGE_MAXSIZE,
                            bufsize=config.AUTORANGE_BUFSIZE,
                            waitsize=config.AUTORANGE_WAITSIZE,
                            threads=config.AUTORANGE_THREADS)
                        return rangefetch.fetch()
                    if response.getheader('Set-Cookie'):
                        response.msg['Set-Cookie'] = self.normcookie(
                            response.getheader('Set-Cookie'))
                    if response.getheader('Content-Disposition'
                                          ) and '"' not in response.getheader(
                                              'Content-Disposition'):
                        response.msg[
                            'Content-Disposition'] = self.normattachment(
                                response.getheader('Content-Disposition'))
                    headers_data = 'HTTP/1.1 %s\r\n%s\r\n' % (
                        response.status, ''.join(
                            '%s: %s\r\n' % (k.title(), v)
                            for k, v in response.getheaders()
                            if k.title() != 'Transfer-Encoding'))
                    #logging.debug('headers_data=%s', headers_data)
                    #self.wfile.write(headers_data.encode() if bytes is not str else headers_data)
                    self.wfile.write(headers_data)
                    headers_sent = True
                content_length = int(response.getheader('Content-Length', 0))
                content_range = response.getheader('Content-Range', '')
                accept_ranges = response.getheader('Accept-Ranges', 'none')
                if content_range:
                    start, end, length = tuple(
                        int(x)
                        for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                           content_range).group(1, 2, 3))
                else:
                    start, end, length = 0, content_length - 1, content_length

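                # An empty body means there is nothing to stream; keep the connection for reuse.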
                if content_length == 0:
                    https_manager.save_ssl_connection_for_reuse(
                        response.ssl_sock)
                    return

                send_to_broswer = True
                while True:
                    data = response.read(8192)  # TODO: loop-read until timeout or exception
                    if not data:
                        response.close()
                        return
                    data_len = len(data)
                    start += data_len
                    if send_to_broswer:
                        try:
                            ret = self.wfile.write(data)
                            if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                                logging.debug("self.wfile.write ret:%d", ret)
                                ret = self.wfile.write(data)
                        except Exception as e_b:
                            logging.exception(
                                'GAEProxyHandler.do_METHOD_AGENT send to browser %r return %r',
                                self.path, e_b)
                            send_to_broswer = False

                    if start >= end:
                        https_manager.save_ssl_connection_for_reuse(
                            response.ssl_sock)
                        return
            except Exception as e:
                logging.exception(
                    'GAEProxyHandler.do_METHOD_AGENT %r return %r', self.path,
                    e)
                errors.append(e)
                if response:
                    response.close()
                if e.args[0] in (errno.ECONNABORTED, errno.EPIPE,
                                 errno.ECONNRESET):
                    #logging.debug('GAEProxyHandler.do_METHOD_AGENT return %r', e)
                    pass
                elif e.args[0] == errno.ETIMEDOUT or isinstance(
                        e.args[0], str) and 'timed out' in e.args[0]:
                    if content_length and accept_ranges == 'bytes':
                        # we can retry range fetch here
                        logging.warn(
                            'GAEProxyHandler.do_METHOD_AGENT timed out, content_length=%r, url=%r, try again',
                            content_length, self.path)
                        self.headers['Range'] = 'bytes=%d-%d' % (start, end)
                elif isinstance(
                        e, NetWorkIOError) and 'bad write retry' in e.args[-1]:
                    logging.warn(
                        'GAEProxyHandler.do_METHOD_AGENT return %r, abort. url=%r ',
                        e, self.path)
                    return
                else:
                    logging.exception(
                        'GAEProxyHandler.do_METHOD_AGENT %r return %r',
                        self.path,
                        e)  #IOError(9, 'Bad file descriptor'), int(e.args[0])
Exemplo n.º 24
0
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    errors = []
    response = None
    while True:
        if time.time() - time_request > 90:  # overall deadline for all retries
            return return_fail_message(wfile)

        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.warn("fetch gae status:%s url:%s", response.app_status, url)


            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.', response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists', u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 403 or response.app_status == 405:  # Method not allowed
                # Google has switched this IP from gws to gvs; remove it.
                logging.warning('405 Method not allowed. remove %s ', response.ssl_sock.ip)
                google_ip.report_connect_fail(response.ssl_sock.ip, force_remove=True)
                response.close()
                continue

            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.', response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists', u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%r %s", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)


    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                #http://en.wikipedia.org/wiki/Chunked_transfer_encoding
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if 'X-Head-Content-Length' in response_headers:
            if method == "HEAD":
                response_headers['Content-Length'] = response_headers['X-Head-Content-Length']
            del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

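        # A non-empty app_msg carries an error page generated on the GAE server side;
        # relay it as the body and stop.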
        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)', content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length-1, content_length


        send_to_broswer = True
        while True:
            time_start = time.time()
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s", (time.time()-time_request)*1000, length, (end-start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d", ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # ECONNABORTED / EPIPE / ECONNRESET and 'bad write retry' are all treated
                    # the same way: log and stop forwarding to the browser.
                    logging.warn('gae_handler send to browser return %r %r', e_b, url)
                    send_to_broswer = False

            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s", (time.time()-time_request)*1000, length, response.status, url)
                return

    except NetWorkIOError as e:
        time_except = time.time()
        time_cost = time_except - time_request
        if e[0] in (errno.ECONNABORTED, errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r time:%d %s ", e, time_cost, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)
Exemplo n.º 25
0
def handler(method, url, headers, body, wfile):
    time_request = time.time()

    errors = []
    response = None
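    # Try up to max_retry times, switching appid on GAE-level errors (404 / 503).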
    for i in xrange(max_retry):
        try:
            response = fetch(method, url, headers, body)
            if response.app_status != 200:
                logging.debug("fetch gae status:%d url:%s",
                              response.app_status, url)

            if response.app_status == 404:
                logging.warning('APPID %r not exists, remove it.',
                                response.ssl_sock.appid)
                appid_manager.report_not_exist(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('404 No usable Appid Exists',
                                                 u'没有可用appid了,请配置可用的appid')
                    send_response(wfile, 404, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status == 503:
                logging.warning('APPID %r out of Quota, remove it.',
                                response.ssl_sock.appid)
                appid_manager.report_out_of_quota(response.ssl_sock.appid)
                appid = appid_manager.get_appid()

                if not appid:
                    html = generate_message_html('503 No usable Appid Exists',
                                                 u'appid流量不足,请增加appid')
                    send_response(wfile, 503, body=html.encode('utf-8'))
                    response.close()
                    return
                else:
                    response.close()
                    continue

            if response.app_status < 500:
                break

        except GAE_Exception as e:
            errors.append(e)
            logging.warn("gae_exception:%s %r", e, url)
        except Exception as e:
            errors.append(e)
            logging.exception('gae_handler.handler %r %s , retry...', e, url)

    if len(errors) == max_retry:
        if response and response.app_status >= 500:
            status = response.app_status
            headers = dict(response.getheaders())
            content = response.read()
        else:
            status = 502
            headers = {'Content-Type': 'text/html'}
            content = generate_message_html(
                '502 URLFetch failed', 'Local URLFetch %r failed' % url,
                '<br>'.join(repr(x) for x in errors))

        if response:
            response.close()

        send_response(wfile, status, headers, content.encode('utf-8'))

        logging.warn("GAE %d %s %s", status, method, url)
        return

    if response.status == 206:
        return RangeFetch(method, url, headers, body, response, wfile).fetch()

    try:
        wfile.write("HTTP/1.1 %d %s\r\n" % (response.status, response.reason))
        response_headers = {}
        for key, value in response.getheaders():
            key = key.title()
            if key == 'Transfer-Encoding':
                continue
            if key in skip_headers:
                continue
            response_headers[key] = value

        if method == "HEAD":
            if 'X-Head-Content-Length' in response_headers:
                response_headers['Content-Length'] = response_headers[
                    'X-Head-Content-Length']
                del response_headers['X-Head-Content-Length']

        for key in response_headers:
            value = response_headers[key]
            send_header(wfile, key, value)
            #logging.debug("Head- %s: %s", key, value)
        wfile.write("\r\n")

        if len(response.app_msg):
            logging.warn("APPID error:%d url:%s", response.status, url)
            wfile.write(response.app_msg)
            response.close()
            return

        content_length = int(response.getheader('Content-Length', 0))
        content_range = response.getheader('Content-Range', '')
        if content_range:
            start, end, length = tuple(
                int(x) for x in re.search(r'bytes (\d+)-(\d+)/(\d+)',
                                          content_range).group(1, 2, 3))
        else:
            start, end, length = 0, content_length - 1, content_length

        time_start = time.time()
        send_to_broswer = True
        while True:
            data = response.read(config.AUTORANGE_BUFSIZE)
            if not data and time.time() - time_start > 20:
                response.close()
                logging.warn("read timeout t:%d len:%d left:%d %s",
                             (time.time() - time_request) * 1000, length,
                             (end - start), url)
                return

            data_len = len(data)
            start += data_len
            if send_to_broswer:
                try:
                    ret = wfile.write(data)
                    if ret == ssl.SSL_ERROR_WANT_WRITE or ret == ssl.SSL_ERROR_WANT_READ:
                        logging.debug("send to browser wfile.write ret:%d",
                                      ret)
                        ret = wfile.write(data)
                except Exception as e_b:
                    # ECONNABORTED / EPIPE / ECONNRESET and 'bad write retry' are all treated
                    # the same way: log and stop forwarding to the browser.
                    logging.warn(
                        'gae_handler send to browser return %r %r', e_b,
                        url)
                    send_to_broswer = False

            if start >= end:
                https_manager.save_ssl_connection_for_reuse(response.ssl_sock)
                logging.info("GAE t:%d s:%d %d %s",
                             (time.time() - time_request) * 1000, length,
                             response.status, url)
                return

    except NetWorkIOError as e:
        if e[0] in (errno.ECONNABORTED,
                    errno.EPIPE) or 'bad write retry' in repr(e):
            logging.warn("gae_handler err:%r %s ", e, url)
        else:
            logging.exception("gae_handler except:%r %s", e, url)
    except Exception as e:
        logging.exception("gae_handler except:%r %s", e, url)