Example 1
async def handler(conn):
    print(conn)

    request = await readall_from_socket(conn)
    # print(request)

    http = HTTP()
    parser = HttpRequestParser(http)
    parser.feed_data(request)
    method = parser.get_method().decode()
    url_path = http.url
    print(method, url_path)

    # Attempt 1
    # with open('requirements.lock', 'rb') as f:
    #     # Only Python 3.5+, and does not support non-blocking sockets
    #     conn.sendfile(f)

    # Attempt 2
    fl = None
    for mp in mapping:
        fl = mp.file(url_path or '')
        if fl:
            break
    if fl:
        if fl.exists():
            filepath = fl.path
            if fl.is_file():
                with open(filepath, 'rb') as f:
                    blocksize = os.path.getsize(filepath)
                    conn.send(b'HTTP/1.1 200 OK\r\n')
                    conn.send(f'Content-Length: {blocksize}\r\n'.encode('ascii'))
                    mime = mimetypes.guess_type(filepath)[0]
                    # mime = "text/plain" if mime else "application/octet-stream"
                    mime = mime or "application/octet-stream"
                    conn.send(
                        f'Content-Type: {mime}; charset=utf-8\r\n'.encode('ascii')
                    )
                    # conn.send(b'Transfer-Encoding: chunked')
                    conn.send(b'\r\n')
                    _ = sendfile(conn.fileno(), f.fileno(), 0, blocksize)
                conn.close()
                return
            elif fl.is_dir():
                files = fl.listdir()
                body = '<br/>'.join(
                    f'<a href="{url_path.rstrip("/")}/{x.basename}{"/" if x.is_dir() else ""}">{x.basename}{"/" if x.is_dir() else ""}</a>'
                    for x in files
                ).encode('utf8')
                conn.send(b'HTTP/1.1 200 OK\r\n')
                conn.send(f'Content-Length: {len(body)}\r\n'.encode('ascii'))
                conn.send(b'Content-Type: text/html; charset=utf-8\r\n')
                conn.send(b'\r\n')
                conn.sendall(body)
                conn.close()
                return

    # Fall through: no mapping matched or the path does not exist
    conn.send(b'HTTP/1.1 404 Not Found\r\n')
    conn.send(b'Content-Type: text/plain; charset=utf-8\r\n')
    conn.send(b'\r\n')
    conn.sendall(b'Not Found')
    conn.close()
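
Example 1 leans on an `HTTP` callback object and a `readall_from_socket()` helper that are not shown. Below is a minimal sketch of the callback side, assuming only the `httptools` package; the class and attribute names are illustrative, not the original code.

from httptools import HttpRequestParser

class HTTP:
    """Collects whatever the parser reports through its callbacks (sketch)."""
    def __init__(self):
        self.url = None
        self.headers = []
        self.body = b''
        self.complete = False

    def on_url(self, url: bytes):
        self.url = url.decode()

    def on_header(self, name: bytes, value: bytes):
        self.headers.append((name.decode(), value.decode()))

    def on_body(self, body: bytes):
        self.body += body

    def on_message_complete(self):
        self.complete = True

http = HTTP()
parser = HttpRequestParser(http)
parser.feed_data(b'GET /index.html HTTP/1.1\r\nHost: example.com\r\n\r\n')
print(parser.get_method(), http.url)  # b'GET' /index.html

feed_data() drives the on_* callbacks synchronously; get_method() and get_http_version() only return meaningful values once the headers have been parsed.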
Example 2
    async def _parse_request(self, request_reader: asyncio.StreamReader,
                             response_writer: asyncio.StreamWriter) -> Request:
        """parse data from StreamReader and build the request object
        """
        limit = 2**16
        req = Request()
        parser = HttpRequestParser(req)

        while True:
            data = await request_reader.read(limit)
            parser.feed_data(data)
            if req.finished or not data:
                break
            elif req.needs_write_continue:
                response_writer.write(b'HTTP/1.1 100 Continue\r\n\r\n')
                req.reset_state()

        req.method = touni(parser.get_method()).upper()
        return req
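
The `Request` object fed to the parser here (and in Example 4 below) is not shown. The following is a rough sketch of what such a class could look like, assuming the standard `Expect: 100-continue` handshake is what drives `needs_write_continue`; everything below is an assumption, not the original class.

from httptools import HttpRequestParser  # feed_data() invokes the callbacks below

class Request:
    def __init__(self):
        self.headers = {}
        self.body = b''
        self.path = None
        self.method = None
        self.finished = False
        self.needs_write_continue = False

    def on_url(self, url: bytes):
        self.path = url.decode()

    def on_header(self, name: bytes, value: bytes):
        self.headers[name.decode().casefold()] = value.decode()
        # Assumed trigger for the 100-continue branch in the read loop above
        if name.lower() == b'expect' and value.lower() == b'100-continue':
            self.needs_write_continue = True

    def on_body(self, body: bytes):
        self.body += body

    def on_message_complete(self):
        self.finished = True

    def reset_state(self):
        self.needs_write_continue = False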
Example 3
    def handle_request(self, data: bytes):
        request = _PRequest()
        parser = HttpRequestParser(request)
        try:
            parser.feed_data(data)
        except HttpParserInvalidMethodError:
            raise ParseError()
        if request.ready:
            return (
                b"",
                (
                    Request(
                        path=request.path,
                        method=parser.get_method(),
                        headers=tuple(request.headers),
                        body=request.body,
                        protocol=b"HTTP/" + parser.get_http_version().encode(),
                    ),
                ),
                False,
            )
        raise ParseError()
Example 4
    async def _parse_request(self, request_reader, response_writer):
        limit = 2 ** 16
        req = Request()
        parser = HttpRequestParser(req)

        while True:
            data = await request_reader.read(limit)
            parser.feed_data(data)
            if req.finished or not data:
                break
            elif req.needs_write_continue:
                response_writer.write(b'HTTP/1.1 100 Continue\r\n\r\n')
                req.reset_state()

        if req.path is None:
            # connected without a formed HTTP request
            return

        handler, args = self.get_handler(req.path)

        req.method = parser.get_method().decode().upper()
        req.args = args
        return req, handler
Example 5
class _HTTPServerProtocol(asyncio.Protocol):
    """ HTTP Protocol handler.
        Should only be used by HTTPServerTransport
    """
    __slots__ = ('_parent', '_transport', 'data', 'http_parser',
                 'request', '_loop')

    def __init__(self, *, parent, loop):
        self._parent = parent
        self._transport = None
        self.data = None
        self.http_parser = HttpRequestParser(self)
        self.request = None
        self._loop = loop

    """ The next 3 methods are for asyncio.Protocol handling """
    def connection_made(self, transport):
        self._transport = transport
        self._parent._connections.add(self)

    def connection_lost(self, exc):
        self._parent._connections.discard(self)

    def data_received(self, data):
        try:
            self.http_parser.feed_data(data)
        except HttpParserError as e:
            traceback.print_exc()
            print(self.request.__dict__)
            self.send_response(Response(status=400,
                                        body={'reason': 'Invalid HTTP',
                                              'details': str(e)}))

    """ 
    The following methods are for HTTP parsing (from httptools)
    """
    def on_message_begin(self):
        self.request = Request()
        self.data = b''

    def on_header(self, name, value):
        key = name.decode('ASCII').lower()
        val = value.decode()
        self.request.headers[key] = val
        if key == 'x-correlation-id':
            self.request.correlation_id = val
        if key == 'content-type':
            self.request.content_type = val

    def on_headers_complete(self):
        self.request.method = self.http_parser.get_method().decode('ASCII')

    def on_body(self, body: bytes):
        self.data += body

    def on_message_complete(self):
        self.request.body = self.data
        task = self._loop.create_task(
            self._parent.handle_incoming_request(self.request)
        )
        task.add_done_callback(self.handle_response)

    def on_url(self, url):
        url = parse_url(url)
        if url.query:
            self.request.query_string = url.query.decode('ASCII')
        self.request.path = url.path.decode('ASCII')

    """
    End parsing methods
    """

    def handle_response(self, future):
        try:
            self.send_response(future.result())
        except Exception:
            traceback.print_exc()
            self.send_response(
                Response(status=500,
                         body={'reason': 'Something really bad happened'}))

    def send_response(self, response):
        headers = 'HTTP/1.1 {status_code} {status_message}\r\n'.format(
            status_code=response.status.value,
            status_message=response.status.phrase,
        )
        if self._parent.shutting_down:
            headers += 'Connection: close\r\n'
        else:
            headers += 'Connection: keep-alive\r\n'

        if response.data:
            headers += 'Content-Type: {}\r\n'.format(response.content_type)
            headers += 'Content-Length: {}\r\n'.format(len(response.data))
            if ('transfer-encoding' in response.headers or
                        'Transfer-Encoding' in response.headers):
                print('Httptools transport currently does not support '
                      'chunked mode, attempting without.')
                response.headers.pop('transfer-encoding', None)
                response.headers.pop('Transfer-Encoding', None)
        else:
            headers += 'Content-Length: {}\r\n'.format(0)
        for header, value in response.headers.items():
            headers += '{header}: {value}\r\n'.format(header=header,
                                                      value=value)

        result = headers.encode('ASCII') + b'\r\n'
        if response.data:
            result += response.data

        self._transport.write(result)
        self.request = 0
        self.data = 0

    def attempt_close(self):
        if self.request == 0:
            self._transport.close()
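
`_HTTPServerProtocol` is a plain `asyncio.Protocol`, so its owner would normally hand it to `loop.create_server()` as a factory. A hedged sketch of that wiring follows; the `parent` object is assumed to provide the `_connections`, `shutting_down` and `handle_incoming_request` members the class uses.

import asyncio

async def serve(parent, host='127.0.0.1', port=8080):
    # Sketch only: each new connection gets its own protocol instance.
    loop = asyncio.get_running_loop()
    server = await loop.create_server(
        lambda: _HTTPServerProtocol(parent=parent, loop=loop), host, port)
    async with server:
        await server.serve_forever()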
Example 6
class Connection(object):
    def __init__(self, sock, addr, app):
        self._sock = sock
        # addr = (hostaddr, port)
        self._addr = addr
        self._fd = self._sock.fileno()
        self.parser = None
        self.url = None
        self.headers = None
        self._header_fragment = b""
        self.request = None

        self.app = app
        self.has_sent_size = 0
        self.send_body_size = 0
        self._read_buffer = bytearray()
        self._write_buffer = bytes()

    # - on_message_begin()
    # - on_url(url: bytes)
    # - on_header(name: bytes, value: bytes)
    # - on_headers_complete()
    # - on_body(body: bytes)
    # - on_message_complete()
    # - on_chunk_header()
    # - on_chunk_complete()
    # - on_status(status: bytes)

    def get_address(self):
        return self._addr

    def close(self):
        KBEngine.deregisterReadFileDescriptor(self._fd)
        self._sock.close()
        self._fd = 0
        self._sock = None
        self.cleanup()

    def cleanup(self):
        self.request = None
        self.parser = None
        self.headers = []
        self.app = None
        self._read_buffer.clear()
        self._write_buffer = None

    def data_received(self, fd):
        # DEBUG_MSG("data_received, fd: %s, id: %s" % (fd, id(self)))
        if self.parser is None:
            self.parser = HttpRequestParser(self)
            self.headers = []

        while True:
            try:
                data = self._sock.recv(4096)
                # DEBUG_MSG("data_received, fd: %s, data len: %s" %
                #           (fd, len(data)))
                # The client closed the connection
                if not data:
                    ERROR_MSG("data_received, data len is 0, close")
                    self.close()
                    return
                self._read_buffer += data
            except (socket.error, IOError, OSError) as e:
                _errno = errno_from_exception(e)
                # The system call was interrupted by a signal
                if _errno == errno.EINTR:
                    continue
                # All data currently available has been read, so recv raises the
                # exception below. That does not mean the whole request has
                # arrived: a single send from the peer may trigger several
                # epoll events.
                elif _errno in ERRNO_WOULDBLOCK:
                    DEBUG_MSG("data_received, done")
                    break

                ERROR_MSG("socket recv error: %s" % str(e))
                self.close()
                return
            except Exception as e:
                ERROR_MSG("data_received exception, e: %s" % str(e))
                return

        if self._read_buffer:
            try:
                self.parser.feed_data(bytes(self._read_buffer))
                # Note: clear the buffer only after feeding; this request may be
                # fed several times, so the buffer must be cleared after each feed.
                self._read_buffer.clear()
            except HttpParserError as e:
                ERROR_MSG(
                    "Connection::data_received feed_data error. error: %s"
                    " \n %s \n id: %s" %
                    (str(e), traceback.format_exc(), id(self)))
                # ERROR_MSG(
                #     "Connection::data_received feed_data error. read_buffer: %s"
                #     " \n" % str(bytes(self._read_buffer))
                #
                # )
                # TODO: send an error response back to the client

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, key, value):
        """
        :param key:
        :param value: bytes
        :return:
        """
        self._header_fragment += key
        if value is not None:
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode("latin_1")

            self.headers.append(
                (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b""

    def on_headers_complete(self):
        DEBUG_MSG("Connection::on_header_complete")
        self.request = Request(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            connection=self,
        )

    def on_body(self, data):
        DEBUG_MSG("Connection::on_body, data len: %s" % len(data))
        # DEBUG_MSG("data: %s" % str(data))
        self.request.body.append(data)

    def on_message_complete(self):
        INFO_MSG("Connection request: %s" % str(self.request))
        self.request.body = b"".join(self.request.body)
        self.handle_request()

    def handle_request(self):
        request_handler_class = self.app.router.get(self.request.path)
        if not request_handler_class:
            response = HTTPResponse("NotFound handler", status=500)
            self.write_response(response)
            return

        try:
            handler = request_handler_class(self.request)
            # TODO: add URL parameters and other data later
            handler.execute_handler()
        except BaseException_ as e:
            response = HTTPResponse("An error occurred, error: %s" % str(e),
                                    status=e.status_code or 500)
            self.write_response(response)
        except Exception as e:
            ERROR_MSG("Internal Server Error: %s" % str(e))
            response = HTTPResponse("Internal Server Error", status=500)
            self.write_response(response)

    def write_response(self, response):
        self._write_buffer = response.output()
        self.send_body_size = len(self._write_buffer)
        self._write_to_fd(False, self._fd)
        if self._check_send_finish():
            self.write_completed()
        else:
            # Not everything was sent in one call; register the fd and let epoll finish it
            DEBUG_MSG("Connection::register write fd: %s" % self._fd)
            KBEngine.registerWriteFileDescriptor(
                self._fd, Functor(self._write_to_fd, True))

    def _write_to_fd(self, in_poller, fd):
        try:
            send_size = self._sock.send(
                self._write_buffer[self.has_sent_size:])
            self.has_sent_size += send_size
            DEBUG_MSG("_write_to_fd, in_poller: %s, fd: %s" % (in_poller, fd))
            if in_poller and self._check_send_finish():
                self.write_completed(in_poller)
        except (socket.error, IOError, OSError) as e:
            ERROR_MSG("write to fd error: %s" % str(e))
            self.close()

    def _check_send_finish(self):
        return self.has_sent_size >= self.send_body_size

    def write_completed(self, in_poller=False):
        if in_poller:
            KBEngine.deregisterWriteFileDescriptor(self._fd)

        DEBUG_MSG("write_completed, in_poller: %s, fd: %s" %
                  (in_poller, self._fd))
        self.close()
Example 7
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'request_max_size',
        # enable or disable access / error logging
        'has_log',
        'log',
        'netlog',
        # connection management
        '_is_upgrade',
        '_total_request_size',
        '_timeout_handler',
        '_last_request_time',
        '_request_handler_task',
        '_request_stream_task',
        '_keep_alive',
        'body_channel',
        'message')

    def __init__(self,
                 *,
                 loop,
                 request_handler: Awaitable,
                 log=None,
                 signal=None,
                 connections=set(),
                 request_timeout=60,
                 request_max_size=None,
                 has_log=True,
                 keep_alive=True,
                 netlog=None):
        '''signal is shared'''
        self.loop = loop
        self.transport = None
        self.parser = None
        self.url = None
        self.headers = None
        self.body_channel = None
        self.message = None
        self.signal = signal
        self.has_log = has_log
        self.log = log
        self.netlog = netlog
        self.connections = connections
        self.request_handler = request_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._is_upgrade = False
        # config.KEEP_ALIVE or not check_headers()['connection_close']
        self._keep_alive = keep_alive

    @property
    def keep_alive(self):
        return (self._keep_alive and not self.signal.stopped and self.parser
                and self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(self.request_timeout,
                                                     self.connection_timeout)
        self.transport = transport
        self._last_request_time = get_current_time()

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()

    def connection_timeout(self):
        # Check whether the elapsed time since the last request exceeds the timeout
        time_elapsed = get_current_time() - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = (self.loop.call_later(
                time_left, self.connection_timeout))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = (408, 'Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data: bytes):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = (413, 'Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserUpgrade:
            upgrade_to_websocket(self)
        except HttpParserError:
            exception = (400, 'Bad Request')
            self.write_error(exception)

    def on_url(self, url: bytes):
        self.url = url

    def on_header(self, name: bytes, value: bytes):
        # for websocket
        name = name.lower()
        if name == b'content-length' and int(value) > self.request_max_size:
            exception = (413, 'Payload Too Large')
            self.write_error(exception)
        if name == b'upgrade':
            self._is_upgrade = True
        self.headers.append([name, value])

    def on_headers_complete(self):
        if self._is_upgrade:
            return
        channels = {}
        self.message = self.get_message(self.transport,
                                        self.parser.get_http_version(),
                                        self.parser.get_method(), self.url,
                                        self.headers)
        channels['body'] = BodyChannel(self.transport)
        channels['reply'] = ReplyChannel(self)
        self.body_channel = channels['body']
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.message, channels))

    def on_body(self, body: bytes):
        if self._is_upgrade:
            return
        body_chunk = self.get_request_body_chunk(body, False, True)
        self._request_stream_task = self.loop.create_task(
            self.body_channel.send(body_chunk))

    def on_message_complete(self):
        if self._is_upgrade:
            return
        body_chunk = self.get_request_body_chunk(b'', False, False)
        self._request_stream_task = self.loop.create_task(
            self.body_channel.send(body_chunk))

    def get_request_body_chunk(self, content: bytes, closed: bool,
                               more_content: bool) -> Dict[str, Any]:
        '''
        http://channels.readthedocs.io/en/stable/asgi/www.html#request-body-chunk
        '''
        return {
            'content': content,
            'closed': closed,
            'more_content': more_content
        }

    def get_message(self, transport, http_version: str, method: bytes,
                    url: bytes, headers: List[List[bytes]]) -> Dict[str, Any]:
        '''
        http://channels.readthedocs.io/en/stable/asgi/www.html#request
        '''
        url_obj = parse_url(url)
        if url_obj.schema is None:
            if transport.get_extra_info('sslcontext'):
                scheme = 'https'
            else:
                scheme = 'http'
        else:
            scheme = url_obj.schema.decode()
        path = '' if url_obj.path is None else url_obj.path.decode('utf-8')
        query = b'' if url_obj.query is None else url_obj.query
        return {
            'channel': 'http.request',
            'reply_channel': None,
            'http_version': http_version,
            'method': method.decode(),
            'scheme': scheme,
            'path': path,
            'query_string': query,
            'root_path': '',
            'headers': headers,
            'body': b'',
            'body_channel': None,
            'client': transport.get_extra_info('peername'),
            'server': transport.get_extra_info('sockname')
        }

    def check_headers(self, headers: List[List[bytes]]) -> Dict[str, bool]:
        connection_close = False
        content_length = False
        for key, value in headers:
            if key == b'Connection' and value == b'close':
                connection_close = True
            if key == b'Content-Length':
                content_length = True
        return {
            'connection_close': connection_close,
            'content_length': content_length
        }

    def after_write(self, more_content, keep_alive):
        if not more_content and not keep_alive:
            self.transport.close()
        elif not more_content and keep_alive:
            self._last_request_time = get_current_time()
            self.cleanup()

    def is_response_chunk(self, message: Dict[str, Any]) -> bool:
        return 'status' not in message and 'headers' not in message

    def make_header_content(self, headers, result_headers, content,
                            more_content):
        header_content = b''
        if headers is not None:
            _header_content = []
            if not more_content and not result_headers['content_length']:
                _header_content.extend(
                    [b'Content-Length: ',
                     str(len(content)).encode(), b'\r\n'])
            for key, value in headers:
                if key == b'Connection':
                    continue
                _header_content.extend([key, b': ', value, b'\r\n'])
            header_content = b''.join(_header_content)
        return header_content

    def send(self, message: Dict[str, Any]):
        transport = self.transport
        status = message.get('status')
        headers = message.get('headers')
        content = message.get('content')
        more_content = message.get('more_content', False)
        result_headers = {'connection_close': False, 'content_length': False}
        if headers is not None:
            result_headers = self.check_headers(headers)
            if result_headers['connection_close'] is True:
                self._keep_alive = False
        keep_alive = self.keep_alive

        if self.is_response_chunk(message):
            content_length = len(content)
            if more_content and content_length > 0:
                transport.write(b'%x\r\n%b\r\n' % (content_length, content))
                self.after_write(more_content, keep_alive)
            elif more_content is False:
                transport.write(b'0\r\n\r\n')
                self.after_write(more_content, keep_alive)
            return

        keep_alive_timeout = self.request_timeout
        timeout_header = b''
        if keep_alive and keep_alive_timeout is not None:
            timeout_header = b'Keep-Alive: %d\r\n' % keep_alive_timeout

        header_content = self.make_header_content(headers, result_headers,
                                                  content, more_content)

        response = (b'HTTP/1.1 %d %b\r\n'
                    b'Connection: %b\r\n'
                    b'%b'
                    b'%b\r\n'
                    b'%b') % (status, ALL_STATUS_CODES[status],
                              b'keep-alive' if keep_alive else b'close',
                              timeout_header, header_content, content)
        transport.write(response)
        self.after_write(more_content, keep_alive)

    def write_error(self, exception):
        try:
            status, content = exception
            content = 'Error: {}'.format(content).encode()
            headers = []
            self.send({
                'status': status,
                'headers': headers,
                'content': content,
                'more_content': False
            })
        except RuntimeError:
            self.log.error('Connection lost before error written.')
        except Exception as e:
            self.bail_out('Writing error failed, connection closed {}'.format(
                repr(e)),
                          from_error=True)
        finally:
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            self.log.error(('Transport closed @ {} and exception '
                            'experienced during error handling').format(
                                self.transport.get_extra_info('peername')))
            self.log.debug('Exception:\n{}'.format(traceback.format_exc()))
        else:
            exception = (500, message)
            self.write_error(exception)
            self.log.error(message)

    def cleanup(self):
        self.parser = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self.body_channel = None
        self.message = None

    def close_if_idle(self):
        '''Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        '''
        if not self.parser:
            self.transport.close()
            return True
        return False
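
`get_message()` above relies on `httptools.parse_url()`; here is a quick illustration (not part of the example) of the fields it exposes, which are byte strings except for `port`.

from httptools import parse_url

u = parse_url(b'http://example.com:8080/search?q=test')
print(u.schema, u.host, u.port, u.path, u.query)
# b'http' b'example.com' 8080 b'/search' b'q=test'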
Example 8
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop', 'transport', 'connections', 'signal',
        # request params
        'parser', 'request', 'url', 'headers',
        # request config
        'request_handler', 'error_handler', 'request_timeout',
        'request_max_size',
        # connection management
        '_total_request_size', '_timeout_handler', '_last_request_time',
        '_request_handler_task')

    def __init__(self, *, loop, request_handler, error_handler,
                 signal=Signal(), connections=set(), request_timeout=60,
                 request_max_size=None):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = signal
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()

    def connection_timeout(self):
        # Check whether the elapsed time since the last request exceeds the timeout
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = (
                self.loop.call_later(time_left, self.connection_timeout))
        else:
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = RequestTimeout('Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)

    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode().casefold(), value.decode()))

    def on_headers_complete(self):
        self.request = Request(
            url_bytes=self.url,
            headers=CIDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport
        )

    def on_body(self, body):
        self.request.body.append(body)

    def on_message_complete(self):
        if self.request.body:
            self.request.body = b''.join(self.request.body)

        self._request_handler_task = self.loop.create_task(
            self.request_handler(
                self.request,
                self.write_response,
                self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        try:
            keep_alive = (
                self.parser.should_keep_alive() and not self.signal.stopped)

            self.transport.write(
                response.output(
                    self.request.version, keep_alive,
                    self.request_timeout))
        except AttributeError:
            log.error(
                ('Invalid response object for url {}, '
                 'Expected Type: HTTPResponse, Actual Type: {}').format(
                    self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error(
                'Connection lost before response written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """

        try:
            keep_alive = (
                self.parser.should_keep_alive() and not self.signal.stopped)

            response.transport = self.transport
            await response.stream(
                self.request.version, keep_alive, self.request_timeout)
        except AttributeError:
            log.error(
                ('Invalid response object for url {}, '
                 'Expected Type: HTTPResponse, Actual Type: {}').format(
                    self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error(
                'Connection lost before response written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    def write_error(self, exception):
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            log.error(
                'Connection lost before error written @ {}'.format(
                    self.request.ip if self.request else 'Unknown'))
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(repr(e)),
                from_error=True)
        finally:
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            log.error(
                ("Transport closed @ {} and exception "
                 "experienced during error handling").format(
                    self.transport.get_extra_info('peername')))
            log.debug(
                'Exception:\n{}'.format(traceback.format_exc()))
        else:
            exception = ServerError(message)
            self.write_error(exception)
            log.error(message)

    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._total_request_size = 0

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
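
The keep-alive logic in Examples 7 to 9 ultimately defers to `parser.should_keep_alive()`. Below is a small illustration of that call, not taken from the example; httptools appears to look callbacks up with getattr, so an empty protocol object is enough just to parse.

from httptools import HttpRequestParser

class Proto:
    pass  # no callbacks needed just to parse

p = HttpRequestParser(Proto())
p.feed_data(b'GET / HTTP/1.1\r\nHost: x\r\n\r\n')
print(p.should_keep_alive())  # True: HTTP/1.1 is persistent by default

p = HttpRequestParser(Proto())
p.feed_data(b'GET / HTTP/1.0\r\nHost: x\r\n\r\n')
print(p.should_keep_alive())  # False: HTTP/1.0 without "Connection: keep-alive"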
Example 9
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'request',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'response_timeout',
        'keep_alive_timeout',
        'request_max_size',
        'request_class',
        'is_request_stream',
        'router',
        # enable or disable the access log
        'access_log',
        # connection management
        '_total_request_size',
        '_request_timeout_handler',
        '_response_timeout_handler',
        '_keep_alive_timeout_handler',
        '_last_request_time',
        '_last_response_time',
        '_is_stream_handler',
        '_request_handler_task',
        '_request_stream_task',
        '_keep_alive',
        '_header_fragment',
        'error_handler',
        'state',
        '_debug')

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections=set(),
                 request_timeout=60,
                 response_timeout=60,
                 keep_alive_timeout=5,
                 request_max_size=None,
                 request_class=None,
                 access_log=True,
                 keep_alive=True,
                 is_request_stream=False,
                 router=None,
                 state=None,
                 debug=False,
                 **kwargs):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.access_log = access_log
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.response_timeout = response_timeout
        self.keep_alive_timeout = keep_alive_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b''
        self.state = state if state else {}
        if 'requests_count' not in self.state:
            self.state['requests_count'] = 0
        self._debug = debug

    @property
    def keep_alive(self):
        return (self._keep_alive and not self.signal.stopped
                and self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._request_timeout_handler = self.loop.call_later(
            self.request_timeout, self.request_timeout_callback)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()

    def request_timeout_callback(self):
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = (self.loop.call_later(
                time_left, self.request_timeout_callback))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise RequestTimeout('Request Timeout')
            except RequestTimeout as exception:
                self.write_error(exception)

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = (self.loop.call_later(
                time_left, self.response_timeout_callback))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise ServiceUnavailable('Response Timeout')
            except ServiceUnavailable as exception:
                self.write_error(exception)

    def keep_alive_timeout_callback(self):
        # Check if elapsed time since last response exceeds our configured
        # maximum keep alive timeout value
        time_elapsed = current_time - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = (self.loop.call_later(
                time_left, self.keep_alive_timeout_callback))
        else:
            logger.debug('KeepAlive Timeout. Closing connection.')
            self.transport.close()
            self.transport = None

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode('latin_1')
            self.headers.append(
                (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b''

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport)
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(body))
            return
        self.request.body.append(body)

    def on_message_complete(self):
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body = b''.join(self.request.body)
        self.execute_request_handler()

    def execute_request_handler(self):
        self._response_timeout_handler = self.loop.call_later(
            self.response_timeout, self.response_timeout_callback)
        self._last_request_time = current_time
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response,
                                 self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def log_response(self, response):
        if self.access_log:
            extra = {
                'status': getattr(response, 'status', 0),
            }

            if isinstance(response, HTTPResponse):
                extra['byte'] = len(response.body)
            else:
                extra['byte'] = -1

            extra['host'] = 'UNKNOWN'
            if self.request is not None:
                if self.request.ip:
                    extra['host'] = '{0}:{1}'.format(self.request.ip,
                                                     self.request.port)

                extra['request'] = '{0} {1}'.format(self.request.method,
                                                    self.request.url)
            else:
                extra['request'] = 'nil'

            access_logger.info('', extra=extra)

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error(
                'Invalid response object for url %s, '
                'Expected Type: HTTPResponse, Actual Type: %s', self.url,
                type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream(self.request.version, keep_alive,
                                  self.keep_alive_timeout)
            self.log_response(response)
        except AttributeError:
            logger.error(
                'Invalid response object for url %s, '
                'Expected Type: HTTPResponse, Actual Type: %s', self.url,
                type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = current_time
                self.cleanup()

    def write_error(self, exception):
        # An error _is_ a response.
        # Don't throw a response timeout, when a response _is_ given.
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before error written @ %s',
                             self.request.ip if self.request else 'Unknown')
        except Exception as e:
            self.bail_out("Writing error failed, connection closed {}".format(
                repr(e)),
                          from_error=True)
        finally:
            if self.parser and (self.keep_alive
                                or getattr(response, 'status', 0) == 408):
                self.log_response(response)
            try:
                self.transport.close()
            except AttributeError as e:
                logger.debug('Connection lost before server could close it.')

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            logger.error(
                "Transport closed @ %s and exception "
                "experienced during error handling",
                self.transport.get_extra_info('peername'))
            logger.debug('Exception:\n%s', traceback.format_exc())
        else:
            exception = ServerError(message)
            self.write_error(exception)
            logger.error(message)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
Example 10
class HttpProtocol(SanicHttpProtocol):
    __slots__ = tuple()

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=None,
                 connections=set(),
                 request_timeout=60,
                 response_timeout=60,
                 keep_alive_timeout=5,
                 request_max_size=None,
                 request_class=None,
                 access_log=True,
                 keep_alive=True,
                 is_request_stream=False,
                 router=None,
                 state=None,
                 debug=False,
                 **kwargs):
        signal = signal or Signal()
        request_class = request_class or Request
        super().__init__(
            loop=loop, request_handler=request_handler,
            error_handler=error_handler, signal=signal,
            connections=connections, request_timeout=request_timeout,
            response_timeout=response_timeout,
            keep_alive_timeout=keep_alive_timeout,
            request_max_size=request_max_size, request_class=request_class,
            access_log=access_log, keep_alive=keep_alive,
            is_request_stream=is_request_stream,
            router=router, state=state, debug=debug, **kwargs)

    def request_timeout_callback(self):
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = server.current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = (self.loop.call_later(
                time_left, self.request_timeout_callback))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise RequestTimeout('Request Timeout')
            except RequestTimeout as exception:
                self.write_error(exception)

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = server.current_time - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = (self.loop.call_later(
                time_left, self.response_timeout_callback))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise ServiceUnavailable('Response Timeout')
            except ServiceUnavailable as exception:
                self.write_error(exception)

    def keep_alive_timeout_callback(self):
        # Check if elapsed time since last response exceeds our configured
        # maximum keep alive timeout value

        time_elapsed = server.current_time - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = (self.loop.call_later(
                time_left, self.keep_alive_timeout_callback))
        else:
            logger.debug('KeepAlive Timeout. Closing connection.')
            self.transport.close()
            self.transport = None

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode('latin_1')
            self.headers.append(
                (self._header_fragment.decode().casefold(), value))
            self._header_fragment = b''

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport)
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error(
                'Invalid response object for url %s, '
                'Expected Type: HTTPResponse, Actual Type: %s', self.url,
                type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = server.current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream(self.request.version, keep_alive,
                                  self.keep_alive_timeout)
            self.log_response(response)
        except AttributeError:
            logger.error(
                'Invalid response object for url %s, '
                'Expected Type: HTTPResponse, Actual Type: %s', self.url,
                type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = server.current_time
                self.cleanup()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            logger.error(
                "Transport closed @ %s and exception "
                "experienced during error handling",
                self.transport.get_extra_info('peername'))
            logger.debug('Exception:\n%s', traceback.format_exc())
        else:
            exception = ServerError(message)
            self.write_error(exception)
            logger.error(message)
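
The three timeout callbacks above share one pattern: a loop.call_later handle that, if it fires before the deadline, re-arms itself for the remaining time, and only reports the error once the full timeout has elapsed since the last recorded activity. A minimal standalone sketch of that pattern (the class and attribute names here are illustrative, not from the original code):

import time


class DeadlineTimer:
    """Illustrative sketch of the re-arming call_later pattern used by the
    request/response/keep-alive timeout callbacks above."""

    def __init__(self, loop, timeout, on_expire):
        self.loop = loop
        self.timeout = timeout
        self.on_expire = on_expire
        self.last_activity = time.time()
        self.handle = loop.call_later(timeout, self._check)

    def touch(self):
        # Record activity; the pending timer is not rescheduled here, it
        # simply re-arms itself for the remaining time when it next fires.
        self.last_activity = time.time()

    def _check(self):
        elapsed = time.time() - self.last_activity
        if elapsed < self.timeout:
            # Activity happened recently: re-arm for the remaining time.
            self.handle = self.loop.call_later(self.timeout - elapsed, self._check)
        else:
            self.on_expire()

    def cancel(self):
        self.handle.cancel()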
Exemplo n.º 11
    def data_received(self, data):
        hrp = HttpRequestParser(self)
        hrp.feed_data(data)
        self.http_version = hrp.get_http_version()
        self.method = hrp.get_method()
        # asyncio.async() no longer exists (async has been a keyword since
        # Python 3.7); ensure_future schedules the coroutine the same way.
        before_requests = asyncio.ensure_future(self.call_before_requests())
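
The fragment above builds a fresh parser on every data_received call. For reference, a small self-contained sketch of which httptools callbacks fire, and in what order, when a complete request is fed in a single chunk (the tracer class is illustrative):

from httptools import HttpRequestParser


class CallbackTracer:
    # Each callback just reports that it fired; httptools calls only the
    # callbacks the protocol object actually defines.
    def on_message_begin(self): print('on_message_begin')
    def on_url(self, url): print('on_url', url)
    def on_header(self, name, value): print('on_header', name, value)
    def on_headers_complete(self): print('on_headers_complete')
    def on_body(self, body): print('on_body', body)
    def on_message_complete(self): print('on_message_complete')


parser = HttpRequestParser(CallbackTracer())
parser.feed_data(
    b'POST /hello HTTP/1.1\r\n'
    b'Host: example\r\n'
    b'Content-Length: 2\r\n'
    b'\r\n'
    b'hi'
)
print(parser.get_method(), parser.get_http_version())  # b'POST' 1.1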
Exemplo n.º 12
class HttpProtocol(asyncio.Protocol):

    __slots__ = ("_route", "_loop", "_transport", "_parser", "_request")

    def __init__(self, event_loop=None, route=None, objt_request=None):
        self._route = route
        self._loop = event_loop
        self._transport = None
        self._parser = HttpRequestParser(self)
        self._request = objt_request()

    def connection_made(self, transport):
        self._transport = transport

    def data_received(self, data):
        try:
            self._parser.feed_data(data)
        except HttpParserError:
            Logger.info("bad request")

    def connection_lost(self, exc):
        self._transport.close()

    def on_url(self, uri):
        self._request.uri = uri.decode()

    def on_header(self, name, value):
        self._request.headers[name] = value

    def on_headers_complete(self):

        self._request.version = self._parser.get_http_version()
        self._request.method = self._parser.get_method().decode()

    def on_body(self, body):
        self._request.body.append(body)

    def on_message_complete(self):
        if self._request.body:
            self._request.body = b"".join(self._request.body)

        self._loop.create_task(
            self.start_response(request=self._request,
                                transport=self._transport))

    async def start_response(self, transport, request):

        view_func = self._route._router.get(request.uri, None)

        if view_func is None:
            transport.write(
                b'HTTP/1.1 404 Not Found\r\nServer: aquarius\r\nContent-Length:9\r\n\r\nNot Found\r\n\r\n'
            )

        else:
            view_obj = view_func.__dict__
            if request.method not in view_obj.get("allowed_method"):
                transport.close()
                return

            if view_obj.get("async"):
                content = await view_func(request)
            else:
                content = view_func(request)

            try:
                transport.write(content)
            except Exception as e:
                transport.close()

        if request.version == "1.0":
            transport.close()
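
A hedged usage sketch for a protocol class like the one above: wiring it into an asyncio server with loop.create_server. The route and objt_request arguments stand in for whatever router and request class the surrounding application actually provides:

import asyncio


async def serve(route, objt_request, host='127.0.0.1', port=8080):
    # Each new connection gets its own HttpProtocol instance via the factory.
    loop = asyncio.get_running_loop()
    server = await loop.create_server(
        lambda: HttpProtocol(event_loop=loop, route=route, objt_request=objt_request),
        host, port)
    async with server:
        await server.serve_forever()

# asyncio.run(serve(my_route, MyRequest))  # hypothetical router and request class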
Exemplo n.º 13
class HttpProtocol(asyncio.Protocol):
    """
    This class provides a basic HTTP implementation of the sanic framework.
    """

    __slots__ = (
        # app
        "app",
        # event loop, connection
        "loop",
        "transport",
        "connections",
        "signal",
        # request params
        "parser",
        "request",
        "url",
        "headers",
        # request config
        "request_handler",
        "request_timeout",
        "response_timeout",
        "keep_alive_timeout",
        "request_max_size",
        "request_buffer_queue_size",
        "request_class",
        "is_request_stream",
        "router",
        "error_handler",
        # enable or disable access log purpose
        "access_log",
        # connection management
        "_total_request_size",
        "_request_timeout_handler",
        "_response_timeout_handler",
        "_keep_alive_timeout_handler",
        "_last_request_time",
        "_last_response_time",
        "_is_stream_handler",
        "_not_paused",
        "_request_handler_task",
        "_request_stream_task",
        "_keep_alive",
        "_header_fragment",
        "state",
        "_debug",
    )

    def __init__(
        self,
        *,
        loop,
        app,
        request_handler,
        error_handler,
        signal=Signal(),
        connections=None,
        request_timeout=60,
        response_timeout=60,
        keep_alive_timeout=5,
        request_max_size=None,
        request_buffer_queue_size=100,
        request_class=None,
        access_log=True,
        keep_alive=True,
        is_request_stream=False,
        router=None,
        state=None,
        debug=False,
        **kwargs
    ):
        self.loop = loop
        self.app = app
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.access_log = access_log
        self.connections = connections if connections is not None else set()
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_buffer_queue_size = request_buffer_queue_size
        self.response_timeout = response_timeout
        self.keep_alive_timeout = keep_alive_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._not_paused = asyncio.Event(loop=loop)
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b""
        self.state = state if state else {}
        if "requests_count" not in self.state:
            self.state["requests_count"] = 0
        self._debug = debug
        self._not_paused.set()

    @property
    def keep_alive(self):
        """
        Check if the connection needs to be kept alive based on the params
        attached to the `_keep_alive` attribute, :attr:`Signal.stopped`
        and :func:`HttpProtocol.parser.should_keep_alive`

        :return: ``True`` if the connection should be kept alive, ``False`` otherwise
        """
        return (
            self._keep_alive
            and not self.signal.stopped
            and self.parser.should_keep_alive()
        )

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._request_timeout_handler = self.loop.call_later(
            self.request_timeout, self.request_timeout_callback
        )
        self.transport = transport
        self._last_request_time = time()

    def connection_lost(self, exc):
        self.connections.discard(self)
        if self._request_handler_task:
            self._request_handler_task.cancel()
        if self._request_stream_task:
            self._request_stream_task.cancel()
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()

    def pause_writing(self):
        self._not_paused.clear()

    def resume_writing(self):
        self._not_paused.set()

    def request_timeout_callback(self):
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = time() - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = self.loop.call_later(
                time_left, self.request_timeout_callback
            )
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(RequestTimeout("Request Timeout"))

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum response timeout value
        time_elapsed = time() - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = self.loop.call_later(
                time_left, self.response_timeout_callback
            )
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(ServiceUnavailable("Response Timeout"))

    def keep_alive_timeout_callback(self):
        """
        Check if elapsed time since last response exceeds our configured
        maximum keep alive timeout value and if so, close the transport
        pipe and let the response writer handle the error.

        :return: None
        """
        time_elapsed = time() - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = self.loop.call_later(
                time_left, self.keep_alive_timeout_callback
            )
        else:
            logger.debug("KeepAlive Timeout. Closing connection.")
            self.transport.close()
            self.transport = None

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            self.write_error(PayloadTooLarge("Payload Too Large"))

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state["requests_count"] = self.state["requests_count"] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = "Bad Request"
            if self._debug:
                message += "\n" + traceback.format_exc()
            self.write_error(InvalidUsage(message))

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if (
                self._header_fragment == b"Content-Length"
                and int(value) > self.request_max_size
            ):
                self.write_error(PayloadTooLarge("Payload Too Large"))
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode("latin_1")
            self.headers.append(
                (self._header_fragment.decode().casefold(), value)
            )

            self._header_fragment = b""

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport,
            app=self.app,
        )
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request
            )
            if self._is_stream_handler:
                self.request.stream = StreamBuffer(
                    self.request_buffer_queue_size
                )
                self.execute_request_handler()

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.body_append(body)
            )
        else:
            self.request.body_push(body)

    async def body_append(self, body):
        if self.request.stream.is_full():
            self.transport.pause_reading()
            await self.request.stream.put(body)
            self.transport.resume_reading()
        else:
            await self.request.stream.put(body)

    def on_message_complete(self):
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None)
            )
            return
        self.request.body_finish()
        self.execute_request_handler()

    def execute_request_handler(self):
        """
        Invoke the request handler defined by the
        :func:`sanic.app.Sanic.handle_request` method

        :return: None
        """
        self._response_timeout_handler = self.loop.call_later(
            self.response_timeout, self.response_timeout_callback
        )
        self._last_request_time = time()
        self._request_handler_task = self.loop.create_task(
            self.request_handler(
                self.request, self.write_response, self.stream_response
            )
        )

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def log_response(self, response):
        """
        Helper method that logs the response when
        :attr:`HttpProtocol.access_log` is enabled.

        :param response: Response generated for the current request

        :type response: :class:`sanic.response.HTTPResponse` or
            :class:`sanic.response.StreamingHTTPResponse`

        :return: None
        """
        if self.access_log:
            extra = {"status": getattr(response, "status", 0)}

            if isinstance(response, HTTPResponse):
                extra["byte"] = len(response.body)
            else:
                extra["byte"] = -1

            extra["host"] = "UNKNOWN"
            if self.request is not None:
                if self.request.ip:
                    extra["host"] = "{0}:{1}".format(
                        self.request.ip, self.request.port
                    )

                extra["request"] = "{0} {1}".format(
                    self.request.method, self.request.url
                )
            else:
                extra["request"] = "nil"

            access_logger.info("", extra=extra)

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(
                    self.request.version, keep_alive, self.keep_alive_timeout
                )
            )
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(repr(e))
            )
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback
                )
                self._last_response_time = time()
                self.cleanup()

    async def drain(self):
        await self._not_paused.wait()

    def push_data(self, data):
        self.transport.write(data)

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None

        try:
            keep_alive = self.keep_alive
            response.protocol = self
            await response.stream(
                self.request.version, keep_alive, self.keep_alive_timeout
            )
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(repr(e))
            )
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback
                )
                self._last_response_time = time()
                self.cleanup()

    def write_error(self, exception):
        # An error _is_ a response.
        # Don't throw a response timeout, when a response _is_ given.
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else "1.1"
            self.transport.write(response.output(version))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before error written @ %s",
                    self.request.ip if self.request else "Unknown",
                )
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(repr(e)),
                from_error=True,
            )
        finally:
            if self.parser and (
                self.keep_alive or getattr(response, "status", 0) == 408
            ):
                self.log_response(response)
            try:
                self.transport.close()
            except AttributeError:
                logger.debug("Connection lost before server could close it.")

    def bail_out(self, message, from_error=False):
        """
        If the transport pipe is closed and the sanic app encounters an
        error while writing data to it, log the error with proper details.

        :param message: Error message to display
        :param from_error: If the bail out was invoked while handling an
            exception scenario.

        :type message: str
        :type from_error: bool

        :return: None
        """
        if from_error or self.transport is None or self.transport.is_closing():
            logger.error(
                "Transport closed @ %s and exception "
                "experienced during error handling",
                (
                    self.transport.get_extra_info("peername")
                    if self.transport is not None
                    else "N/A"
                ),
            )
            logger.debug("Exception:", exc_info=True)
        else:
            self.write_error(ServerError(message))
            logger.error(message)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
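
When is_request_stream is enabled above, the protocol pushes body chunks into request.stream and finally enqueues a None sentinel in on_message_complete. A rough sketch of the consuming side, written against a plain asyncio.Queue as a stand-in for StreamBuffer (the handler below and its use of .get() are assumptions for illustration, not part of the example):

async def streaming_handler(request, write_response, stream_response):
    # Drain chunks pushed by the protocol until the None sentinel arrives.
    chunks = []
    while True:
        chunk = await request.stream.get()
        if chunk is None:
            break
        chunks.append(chunk)
    body = b''.join(chunks)
    # ... build a response from `body` and hand it to write_response ...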
Exemplo n.º 14
class _HTTPServerProtocol(asyncio.Protocol):
    """ HTTP Protocol handler.
        Should only be used by HTTPServerTransport
    """
    __slots__ = ('_parent', '_transport', '_task', 'data', 'http_parser',
                 'request', '_loop')

    def __init__(self, *, parent, loop):
        self._parent = parent
        self._transport = None
        self.data = None
        self.http_parser = HttpRequestParser(self)
        self.request = None
        self._loop = loop
        self._task: asyncio.Task = None

    """ The next 3 methods are for asyncio.Protocol handling """

    def connection_made(self, transport):
        self._transport = transport
        self._parent._connections.add(self)

    def connection_lost(self, exc):
        self._parent._connections.discard(self)
        if self._task:
            self._task.cancel()
        self._transport = None

    def data_received(self, data):
        try:
            self.http_parser.feed_data(data)
        except HttpParserError as e:
            traceback.print_exc()
            logger.error('Bad http: %s', self.request)
            if self._transport:
                self.send_response(
                    Response(status=400,
                             body={
                                 'reason': 'Invalid HTTP',
                                 'details': str(e)
                             }))

    """ 
    The following methods are for HTTP parsing (from httptools)
    """

    def on_message_begin(self):
        self.request = Request()
        self.data = b''

    def on_header(self, name, value):
        key = name.decode('latin-1').lower()
        if not value:
            value = b''

        val = value.decode()
        self.request.headers[key] = val
        if key == 'x-correlation-id':
            self.request.correlation_id = val
        if key == 'content-type':
            self.request.content_type = val

    def on_headers_complete(self):
        self.request.method = self.http_parser.get_method().decode('latin-1')

    def on_body(self, body: bytes):
        self.data += body

    def on_message_complete(self):
        self.request.body = self.data
        task = self._loop.create_task(
            self._parent.handle_incoming_request(self.request))
        task.add_done_callback(self.handle_response)
        self._task = task

    def on_url(self, url):
        url = url.replace(b'//', b'/')
        url = parse_url(url)
        if url.query:
            # query = urllib.parse.unquote(url.query.decode('latin-1'))
            self.request.query_string = url.query.decode('latin-1')
        path = urllib.parse.unquote(url.path.decode('latin-1'))
        if path.startswith(self._parent.prefix):
            path = path[len(self._parent.prefix):]
        self.request.path = path

    """
    End parsing methods
    """

    def handle_response(self, future):
        try:
            self.send_response(future.result())
        except Exception:
            traceback.print_exc()
            self.send_response(
                Response(status=500,
                         body={'reason': 'Something really bad happened'},
                         content_type=self.request.app.default_content_type))

    def send_response(self, response):
        if response is None:
            # connection closed, no response
            return

        headers = 'HTTP/1.1 {status_code} {status_message}\r\n'.format(
            status_code=response.status.value,
            status_message=response.status.phrase,
        )
        headers += 'Connection: close\r\n'
        # if self._parent.shutting_down:
        # headers += 'Connection: close\r\n'
        # else:
        # headers += 'Connection: keep-alive\r\n'
        # headers += 'Keep-Alive: timeout=5, max=50\r\n'

        if response.raw_body:
            headers += 'Content-Type: {}\r\n'.format(response.content_type)
            headers += 'Content-Length: {}\r\n'.format(len(response.raw_body))
            if ('transfer-encoding' in response.headers
                    or 'Transfer-Encoding' in response.headers):
                print('Httptoolstransport currently does not support '
                      'chunked mode, attempting without.')
                response.headers.pop('transfer-encoding', None)
                response.headers.pop('Transfer-Encoding', None)
        else:
            headers += 'Content-Length: {}\r\n'.format(0)
        for header, value in response.headers.items():
            if header in ('Content-Length', 'content-length'):
                continue
            headers += '{header}: {value}\r\n'.format(header=header,
                                                      value=value)

        result = headers.encode('latin-1') + b'\r\n'
        if response.raw_body:
            result += response.raw_body

        try:
            self._transport.write(result)
        except AttributeError:
            # "NoneType has no attribute 'write'" because transport is closed
            logger.debug(
                'Connection closed prematurely, most likely by client')
        self.request = 0
        self.data = 0
        self.attempt_close()

    def attempt_close(self):
        if self.request == 0 and self._transport:
            self._transport.close()
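
The on_url callback above relies on httptools.parse_url to split the request target; a short sketch of what the parsed object exposes (components come back as bytes):

from httptools import parse_url

url = parse_url(b'/api/items?page=2')
print(url.path)   # b'/api/items'
print(url.query)  # b'page=2'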
Exemplo n.º 15
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop', 'transport', 'connections', 'signal',
        # request params
        'parser', 'request', 'url', 'headers',
        # request config
        'request_handler', 'request_timeout', 'request_max_size',
        # connection management
        '_total_request_size', '_timeout_handler', '_last_communication_time')

    def __init__(self, *, loop, request_handler, error_handler,
                 signal=Signal(), connections=set(), request_timeout=60,
                 request_max_size=None):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = signal
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()

    def connection_timeout(self):
        # Check if elapsed time since the request started exceeds our
        # configured request timeout
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = (
                self.loop.call_later(time_left, self.connection_timeout))
        else:
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = RequestTimeout('Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)

    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode().casefold(), value.decode()))

    def on_headers_complete(self):
        self.request = Request(
            url_bytes=self.url,
            headers=CIDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport
        )

    def on_body(self, body):
        self.request.body.append(body)

    def on_message_complete(self):
        if self.request.body:
            self.request.body = b''.join(self.request.body)
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #

    def write_response(self, response):
        try:
            keep_alive = (
                self.parser.should_keep_alive() and not self.signal.stopped)
            self.transport.write(
                response.output(
                    self.request.version, keep_alive, self.request_timeout))
        except RuntimeError:
            log.error(
                'Connection lost before response written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(e))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                # Record that we received data
                self._last_request_time = current_time
                self.cleanup()

    def write_error(self, exception):
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            log.error(
                'Connection lost before error written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(e),
                from_error=True)
        finally:
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error and self.transport.is_closing():
            log.error(
                ("Transport closed @ {} and exception "
                 "experienced during error handling").format(
                    self.transport.get_extra_info('peername')))
            log.debug(
                'Exception:\n{}'.format(traceback.format_exc()))
        else:
            exception = ServerError(message)
            self.write_error(exception)
            log.error(message)

    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if a request is not being sent or received
        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
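
write_response above derives keep_alive from parser.should_keep_alive(). A quick self-contained check of what that call returns once a request has been fully parsed (HTTP/1.1 defaults to keep-alive unless the client sends Connection: close); the callback-less sink class is just for illustration:

from httptools import HttpRequestParser


class _Sink:
    # No callbacks are needed just to query should_keep_alive().
    pass


p = HttpRequestParser(_Sink())
p.feed_data(b'GET / HTTP/1.1\r\nHost: example\r\nConnection: close\r\n\r\n')
print(p.should_keep_alive())  # False

p = HttpRequestParser(_Sink())
p.feed_data(b'GET / HTTP/1.1\r\nHost: example\r\n\r\n')
print(p.should_keep_alive())  # True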
Exemplo n.º 16
class LuyProtocol(asyncio.Protocol):
    def __init__(self, app, loop=None, keep_alive=True):
        self.parser = None
        self.url = None
        self._request_handler_task = None
        self.loop = loop

        self.header = {}
        self.app = app
        self.keep_alive = keep_alive

    def connection_made(self, transport):
        self.transport = transport
        if self.parser is None:
            self.parser = HttpRequestParser(self)

    def connection_lost(self, exc):

        self.transport.close()
        self.refresh()
        self.transport = None

    #-------------------------------------
    #               parsing
    #-------------------------------------
    def data_received(self, data):
        try:
            self.parser.feed_data(data)
        except HttpParserError as e:
            print('error while parsing request:', e)
        finally:
            pass

    def on_message_begin(self):
        # print('on_message_begin')
        pass

    def on_url(self, url):
        # storing the url from web
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        '''
        Content-Length must not be too large,
        to protect the server.
        '''
        if value is not None:
            if name == b'Content-Length' and int(value) > 1500:
                self.write_error()

            self.header[name.decode().casefold()] = value.decode()

    def on_headers_complete(self):
        self.request = request_class(url=self.url.decode(),
                                     header=self.header,
                                     version=self.parser.get_http_version(),
                                     method=self.parser.get_method().decode())

    def on_body(self, body):
        self.request.body.append(body)

    def on_message_complete(self):
        # print('on_message_complete')
        self._request_handler_task = self.loop.create_task(
            self.app.request_handler(self.request, self.write_response, None))

    #---------------------------
    #      error handling
    #---------------------------
    def write_error(self):
        response = html('bad connection', status=400)
        self.write_response(response)
        self.transport.close()

    #-------------------------------------
    #            write response
    #-------------------------------------

    def write_response(self, response):
        '''
        Writing the response is fast,
        so it does not need to be a coroutine.
        '''
        try:
            keep_alive = self.keep_alive
            self.transport.write(response.drain(keep_alive=keep_alive))
            if keep_alive:
                self.refresh()
            else:
                self.transport.close()
        except AttributeError as e:
            print('AttributeError????', e)
            self.transport.close()
        except RuntimeError as e:
            print('RuntimeError????', e)
            self.transport.close()
        except Exception as e:
            print('Exception????', e)
            self.transport.close()

    def refresh(self):
        '''
        Reset the per-request state to prepare
        for the next incoming request.
        '''
        self.url = None
        self.header = {}
Exemplo n.º 17
class HttpProtocol(asyncio.Protocol):
    """
    HTTP protocol
    """
    # slots
    __slots__ = (
        # event loop, connection
        'loop', 'transport', 'connections', 'signal',
        # request params
        'parser', 'request', 'url', 'headers',
        # request config
        'request_handler', 'request_timeout', 'request_max_size',
        # connection management
        '_total_request_size', '_timeout_handler', '_last_communication_time')

    def __init__(self, *, loop, request_handler, error_handler,
                 signal=Signal(), connections=set(), request_timeout=60,
                 request_max_size=None):
        self.loop = loop                            # event loop
        self.transport = None
        self.request = None                         # request
        self.parser = None
        self.url = None                             # requested path
        self.headers = None                         # request headers
        self.signal = signal                        # stop/shutdown flag
        self.connections = connections              # connection set
        self.request_handler = request_handler      # request handler
        self.error_handler = error_handler          # error handler
        self.request_timeout = request_timeout      # request timeout
        self.request_max_size = request_max_size    # request max size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        """
        Connection established.
        """
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        """
        Connection lost.
        """
        self.connections.discard(self)
        self._timeout_handler.cancel()
        self.cleanup()

    def connection_timeout(self):
        """
        Connection timed out.
        """
        time_elapsed = current_time - self._last_request_time   # time since the last request
        if time_elapsed < self.request_timeout: # not timed out yet
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = \
                self.loop.call_later(time_left, self.connection_timeout)
        else:   # timed out
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = RequestTimeout('Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        """
        Receive data.
        """
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:    # request payload too large
            # PayloadTooLarge is defined in `exceptions.py`
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create the parser the first time data is received
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse the request
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)

    def on_url(self, url):
        """
        Capture the URL.
        """
        self.url = url

    def on_header(self, name, value):
        """
        Collect the HTTP request header fields.
        """
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode(), value.decode('utf-8')))

    def on_headers_complete(self):
        """
        Build the request once the headers are complete.
        """
        # remote address
        remote_addr = self.transport.get_extra_info('peername')
        if remote_addr:
            self.headers.append(('Remote-Addr', '%s:%s' % remote_addr))

        # HTTP request headers
        self.request = Request(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode()
        )

    def on_body(self, body):
        """
        Accumulate the HTTP request body.
        """
        if self.request.body:
            self.request.body += body
        else:
            self.request.body = body

    def on_message_complete(self):
        """
        Create the handler task.
        """
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #

    def write_response(self, response):
        """
        Write the HTTP response.
        """
        try:
            keep_alive = self.parser.should_keep_alive() \
                            and not self.signal.stopped
            # write out the response
            self.transport.write(
                response.output(
                    self.request.version, keep_alive, self.request_timeout))
            if not keep_alive:
                self.transport.close()
            else:
                # record that we received data
                self._last_request_time = current_time
                self.cleanup()
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(e))

    def write_error(self, exception):
        """
        Write an HTTP error response.
        """
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
            self.transport.close()
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(e))

    def bail_out(self, message):
        """
        Helper method for logging exceptions.
        """
        exception = ServerError(message)
        self.write_error(exception)
        log.error(message)

    def cleanup(self):
        """
        Reset the request fields.
        """
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if no request is being sent or received.
        :return: boolean - True if closed, False if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
Exemplo n.º 18
class BaseServer(asyncio.Protocol):
    def __init__(self, loop, requesthandle, toggle, connections=set(), request_timeout=10):
        self.loop = loop
        self.requesthandle = requesthandle
        self.toggle = toggle
        self.connections = connections
        self.request_timeout = request_timeout

        self.timehandle = None
        self.requesthandletask = None
        # default per-request state
        self.ip = None
        self.parse = None
        self.url = None
        self.headers = {}
        self.body = None
        self.httpversion = None
        self.method = None
        self.request = None
        self.contentlength = 0
        self.keep_alive = False
        # print("enter init")

    #######################
    # connection setup
    def connection_made(self, transport):
        # add this connection to the global connection set
        print("connect start")
        self.connections.add(self)
        self.transport = transport
        self.ip = transport.get_extra_info("peername")
        # self.timehandle = self.loop.call_later(self.request_timeout, self.teardown)

    def connection_lost(self, exc):
        print("connect end")
        self.connections.remove(self)
        # self.timehandle.cancel()
        self.clean()

    ########################
    # data parsing
    def data_received(self, data):
        pprint(data.decode("utf-8"))
        self.contentlength += len(data)
        if not self.parse:
            # HttpRequestParser invokes the following callbacks on this object:
            # - on_url(url:byte)
            # - on_header(name: bytes, value: bytes)
            # - on_headers_complete()
            # - on_body(body: bytes)
            # - on_message_complete()
            # get_http_version(self) -> str
            # def should_keep_alive(self) -> bool:
            self.parse = HttpRequestParser(self)
        try:
            self.parse.feed_data(data)
        except HttpParserError:
            # malformed request data; this minimal server just ignores it
            pass


    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        self.headers[name] = value

    def on_headers_complete(self):
        self.method = self.parse.get_method()
        self.httpversion = self.parse.get_http_version()
        self.keep_alive = self.parse.should_keep_alive()
        self.request = Request(self.ip, self.url, self.headers, self.method, self.httpversion,
                               self.request_timeout if self.keep_alive else None)

    def on_body(self, body):
        # print(body)
        self.body = body
        self.request.setbody(body)

    def on_message_complete(self):
        print("parser complete")
        self.requesthandletask = self.loop.create_task(self.requesthandle(self.request, self.write))

    #########################
    # write the response
    def write(self, response):
        print("start write")
        self.transport.write(response.make_response())
        print("end write")
        keep_alive = self.keep_alive and not self.toggle[0]
        if not keep_alive:
            self.transport.close()
        else:
            self.clean()

    ########################
    # clean up or close the connection
    def clean(self):
        self.requesthandletask = None
        self.parse = None
        self.url = None
        self.headers = {}
        self.body = None
        self.httpversion = None
        self.method = None
        self.request = None
        self.contentlength = 0
        self.keep_alive = False

    def teardown(self):
        if not self.parse:
            self.transport.close()
            return True
        return False
Exemplo n.º 19
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'request',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'request_max_size',
        # connection management
        '_total_request_size',
        '_timeout_handler',
        '_last_communication_time')

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections=set(),
                 request_timeout=60,
                 request_max_size=None):
        self.loop = loop
        self.transport = None
        self.request = None  # request
        self.parser = None
        self.url = None
        self.headers = None  # request headers
        self.signal = signal
        self.connections = connections
        self.request_handler = request_handler  # request handler
        self.error_handler = error_handler  # error handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(self.request_timeout,
                                                     self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()
        self.cleanup()

    def connection_timeout(self):
        # Check if elapsed time since the request started exceeds our
        # configured request timeout
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = \
                self.loop.call_later(time_left, self.connection_timeout)
        else:
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = RequestTimeout('Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            exception = InvalidUsage('Bad Request')
            self.write_error(exception)

    def on_url(self, url):
        self.url = url

    #
    # HTTP request: collect header fields
    #   -  append to the headers list
    #
    def on_header(self, name, value):
        if name == b'Content-Length' and int(value) > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        self.headers.append((name.decode(), value.decode('utf-8')))

    #
    # HTTP request: headers are complete, build the Request
    #
    def on_headers_complete(self):
        remote_addr = self.transport.get_extra_info('peername')
        if remote_addr:
            self.headers.append(('Remote-Addr', '%s:%s' % remote_addr))

        #
        # Build the HTTP Request object
        #
        self.request = Request(url_bytes=self.url,
                               headers=CIMultiDict(self.headers),
                               version=self.parser.get_http_version(),
                               method=self.parser.get_method().decode())

    #
    # HTTP request: accumulate the body
    #
    def on_body(self, body):
        if self.request.body:
            self.request.body += body
        else:
            self.request.body = body

    def on_message_complete(self):
        #
        # Schedule the request handler as a task
        #
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    #   -  HTTP response handling
    # -------------------------------------------- #

    #
    # HTTP response: normal case
    #   - write out the HTTP response
    #   - for keep-alive connections, refresh the last-request time
    #
    def write_response(self, response):
        try:
            keep_alive = (self.parser.should_keep_alive()
                          and not self.signal.stopped)
            #
            # Write out the HTTP response
            #
            self.transport.write(
                response.output(  # serialize the HTTP response
                    self.request.version, keep_alive, self.request_timeout))

            if not keep_alive:  # not a keep-alive connection, close it
                self.transport.close()
            else:
                # Record that we received data
                self._last_request_time = current_time
                self.cleanup()
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(e))

    #
    # HTTP response: error case
    #
    def write_error(self, exception):
        try:
            response = self.error_handler.response(self.request,
                                                   exception)  # build the error response
            version = self.request.version if self.request else '1.1'  # HTTP protocol version
            self.transport.write(
                response.output(version))  # serialize and write the response
            self.transport.close()
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(e))

    #
    # Exception logging
    #
    def bail_out(self, message):
        exception = ServerError(message)
        self.write_error(exception)
        log.error(message)

    #
    # Cleanup:
    #   - reset the per-request fields
    #
    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if a request is not being sent or received
        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
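
The HttpProtocol above is only the per-connection half; nothing in the example shows how it gets bound to a listening socket. A minimal sketch of serving such an asyncio.Protocol subclass, assuming only the constructor signature used above (`handle` is a placeholder handler, not part of the example):

import asyncio
from functools import partial

async def handle(request, write_response):
    # placeholder handler: a real one would build a response and call write_response(...)
    ...

def serve(protocol_cls, error_handler, host="127.0.0.1", port=8000):
    loop = asyncio.new_event_loop()
    # create_server calls the factory once per incoming connection
    factory = partial(protocol_cls, loop=loop,
                      request_handler=handle, error_handler=error_handler)
    server = loop.run_until_complete(loop.create_server(factory, host, port))
    try:
        loop.run_forever()
    finally:
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.close()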
Exemplo n.º 20
0
class SimpleHttp(Protocol):
    '''
    SimpleHttp is a lightweight HTTP protocol implementation;
    a proxy or gateway can be configured for it.
    '''

    __slots__ = ("loop", "conns", "router", "transport", "request", "response",
                 "parser", "request_limit_size", "request_cur_size",
                 "request_timeout", "response_timeout", "keep_alive",
                 "last_request_time", "remote_addr", "request_timeout_task",
                 "response_timeout_task", "conn_timeout_task")

    def __init__(
            self,
            *,
            loop,
            conns,  # server.conns
            router,  # path handler mgr
            request_limit_size=1024 * 1024 * 1,  # 1M
            request_timeout=60,
            response_timeout=60,
            keep_alive=10):

        self.loop = loop
        self.conns = conns
        self.router = router
        self.transport = None
        self.request = Request(self)
        self.response = Response(self)
        self.response.set_keep_alive()
        self.parser = HttpRequestParser(self)

        self.request_limit_size = request_limit_size
        self.request_cur_size = 0
        self.request_timeout = request_timeout
        self.response_timeout = response_timeout
        self.keep_alive = keep_alive
        self.last_request_time = 0

        self.remote_addr = None
        self.request_timeout_task = None
        self.response_timeout_task = None
        self.conn_timeout_task = None

        self.route_mgr = None

    def connection_made(self, transport):
        self.transport = transport
        self.remote_addr = transport.get_extra_info("peername")
        self.conns.add(self)

        self.last_request_time = time()
        self.request_timeout_task = self.loop.call_later(
            self.request_timeout, self.request_timeout_handler)

    def data_received(self, data):
        self.request_cur_size += len(data)
        if self.request_cur_size > self.request_limit_size:
            self.response.error(UnlightException(413))

        try:
            self.parser.feed_data(data)
        except HttpParserError:
            self.response.error(UnlightException(400))  # malformed request -> Bad Request
            traceback.print_exc()

    def connection_lost(self, err):
        self.conns.discard(self)
        self._cancel_request_timeout_task()
        self._cancel_response_timeout_task()
        self._cancel_conn_timeout_task()

    @property
    def is_keep_alive(self):
        return self.keep_alive and self.parser.should_keep_alive()

    def write(self, enc_data):
        ''' write and try keep alive '''
        try:
            self.transport.write(enc_data)
        except RuntimeError:
            unlight_logger.error(
                "Connection lost before response written @ %s",
                self.remote_addr if self.remote_addr else "Unknown")
        finally:
            if self.is_keep_alive:
                self._cancel_conn_timeout_task()
                self.conn_timeout_task = self.loop.call_later(
                    self.keep_alive, self.keep_alive_timeout_handler)
                self.reset()
            else:
                self.transport.close()
                self.transport = None

    def fatal(self, enc_err):
        ''' write and close '''
        try:
            self.transport.write(enc_err)
        except RuntimeError:
            unlight_logger.error(
                "Connection lost before error written @ %s",
                self.remote_addr if self.remote_addr else "Unknown")
        finally:
            try:
                self.transport.close()
                self.transport = None
            except AttributeError:
                unlight_logger.error(
                    "Connection lost before server could close it.")

    def reset(self):
        self.request_cur_size = 0
        self.request.reset()
        self.response.reset()
        self._cancel_response_timeout_task()

    def request_timeout_handler(self):
        self._cancel_request_timeout_task()
        self.response.error(UnlightException(408))

    def response_timeout_handler(self):
        self._cancel_response_timeout_task()
        self.response.error(UnlightException(502))

    def keep_alive_timeout_handler(self):
        self._cancel_request_timeout_task()
        self.transport.close()
        self.transport = None

    def _cancel_request_timeout_task(self):
        if self.request_timeout_task:
            self.request_timeout_task.cancel()
            self.request_timeout_task = None

    def _cancel_response_timeout_task(self):
        if self.response_timeout_task:
            self.response_timeout_task.cancel()
            self.response_timeout_task = None

    def _cancel_conn_timeout_task(self):
        if self.conn_timeout_task:
            self.conn_timeout_task.cancel()
            self.conn_timeout_task = None

    def on_url(self, burl):
        self.request.add_burl(burl)

    def on_header(self, bname, bvalue):
        self.request.add_bheader(bname, bvalue)
        if (bname.lower() == b"content-length"
                and int(bvalue) > self.request_limit_size):
            self.response.error(UnlightException(413))
        if bname.lower() == b"expect" and bvalue.lower() == b"100-continue":
            self.response.error(UnlightException(100))

    def on_headers_complete(self):  # httptools invokes on_headers_complete
        self.response_timeout_task = self.loop.call_later(
            self.response_timeout, self.response_timeout_handler)
        self._cancel_request_timeout_task()

    def on_body(self, bbody):
        if self.request.add_bbody(bbody) == 2:
            self.response.error(UnlightException(400))  # parse err

    def on_message_complete(self):
        self.request.set_method(self.parser.get_method().decode())
        self.loop.create_task(
            self.router.handle_request(self.request, self.response))
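
Every class in this collection relies on the same callback contract: httptools' HttpRequestParser calls back into whatever object it was constructed with as feed_data() consumes bytes. A minimal, framework-free sketch of that contract:

from httptools import HttpRequestParser

class EchoCallbacks:
    # httptools invokes these (all optional) callbacks while parsing
    def on_message_begin(self):
        print("message begin")
    def on_url(self, url):
        print("url:", url)
    def on_header(self, name, value):
        print("header:", name, value)
    def on_headers_complete(self):
        print("headers complete")
    def on_body(self, body):
        print("body:", body)
    def on_message_complete(self):
        print("message complete")

cb = EchoCallbacks()
parser = HttpRequestParser(cb)
parser.feed_data(
    b"POST /login HTTP/1.1\r\n"
    b"Host: example.com\r\n"
    b"Content-Length: 5\r\n"
    b"\r\n"
    b"hello"
)
print(parser.get_method(), parser.get_http_version(), parser.should_keep_alive())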
Exemplo n.º 21
0
class HttpProtocol(asyncio.Protocol):

    __slots__ = ("_route", "_loop", "_transport", "_parser", "_request")

    def __init__(self, event_loop=None, route=None, re_route=None):
        self._route = route
        self._re_route = re_route
        self._loop = event_loop
        self._transport = None
        self._parser = HttpRequestParser(self)
        self._request = Request()

    def connection_made(self, transport):
        self._transport = transport

    def data_received(self, data):
        try:
            self._parser.feed_data(data)
        except HttpParserError:
            pass

    def connection_lost(self, exc):
        self._transport.close()

    def on_url(self, uri):
        self._request.uri = uri.decode()

    def on_header(self, name, value):
        self._request.headers[name] = value

    def on_headers_complete(self):

        self._request.version = self._parser.get_http_version()
        self._request.method = self._parser.get_method().decode()

    def on_body(self, body):
        self._request.body.append(body)

    def on_message_complete(self):
        if self._request.body:
            self._request.body = b"".join(self._request.body)

        self._loop.create_task(
            self.start_response(request=self._request,
                                transport=self._transport))

    async def start_response(self, transport, request):

        try:
            _view = self._route.get(request.url, self._re_route)

            if isinstance(_view, list):
                for _re_route_tuple in _view:

                    regex, nums, view = _re_route_tuple
                    _re_uri = re.match(regex, request.url)

                    if _re_uri:
                        args = [_re_uri.group(i + 1) for i in range(nums)]
                        content = await view(request, *args)
                        break
                else:
                    print(request.url)
                    raise RouterError("404 %s" % request.url)
            else:
                content = await _view(request)

            transport.write(content)
        except (RouterError, ValueError, AttributeError):
            transport.write(
                b'HTTP/1.1 404 Not Found\r\nServer: aquarius\r\nContent-Length:9\r\n\r\nNot Found\r\n\r\n'
            )

        if request.version == "1.0":
            transport.close()
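
start_response above expects self._re_route to be a list of (pattern, group_count, view) tuples and self._route to map exact paths to views. A hypothetical route table for it could be assembled like this (the handler names are made up for illustration):

async def get_user(request, user_id):
    # hypothetical handler; user_id comes from the first regex group
    return b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok"

async def get_post(request, user_id, post_id):
    return b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok"

# regex routes: (pattern, number of capture groups, coroutine handler)
re_route = [
    (r"^/users/(\d+)$", 1, get_user),
    (r"^/users/(\d+)/posts/(\d+)$", 2, get_post),
]

# exact-match routes keyed by path; re_route is the fallback
route = {}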
Exemplo n.º 22
0
File: http1.py Project: zozzz/vizen
class HTTP1Protocol(AbstractProtocol):
    __slots__ = ("parser", "headers", "request", "response", "url",
                 "body_parser")

    parser: HttpRequestParser
    body_parser: BodyParser
    request: Request
    response: Response
    headers: Headers
    url: Any

    def __init__(self):
        super().__init__()
        self.parser = HttpRequestParser(self)
        self.headers = Headers()
        self.body_parser = None

    # ---------------- #
    # PROTOCOL METHODS #
    # ---------------- #

    def connection_lost(self, exc):
        # print("HTTP1Protocol.connection_lost")
        pass

    def pause_writing(self):
        print("HTTP1Protocol.pause_writing")

    def resume_writing(self):
        print("HTTP1Protocol.resume_writing")

    def data_received(self, data):
        self.parser.feed_data(data)

    def eof_received(self):
        print("HTTP1Protocol.eof_received")

    # --------------------- #
    # PARSER EVENT HANDLERS #
    # --------------------- #

    def on_url(self, url: bytes) -> None:
        self.url = parse_url(url)

    def on_header(self, name: bytes, value: bytes) -> None:
        self.headers[name] = value

    def on_headers_complete(self) -> None:
        injector = self.injector.descend()

        method = self.parser.get_method()
        if method == b"POST":
            if b"content-type" in self.headers:
                ct, params = parse_header(
                    self.headers[b"content-type"].decode("ASCII"))

                if ct == "multipart/form-data":
                    self.body_parser = FormDataParser(
                        params["boundary"].encode("ASCII"))

        if self.body_parser is None:
            self.body_parser = RawBody()

        injector[BodyParser] = self.body_parser

        response = self.response = injector[Response] = injector[Response]
        request = self.request = injector[Request] = injector[Request]

        request.method = method
        request.version = response.version = self.parser.get_http_version()
        request.url = self.url
        request.headers = self.headers

        injector[Cookie] = injector[Cookie]

        task = self.loop.create_task(request())
        task.injector = injector
        task.add_done_callback(self.__finalize_task)
        request.on_headers.set()

    # def on_message_begin(self):
    #     print("on_message_begin")

    def on_body(self, body: bytes):
        if self.body_parser is not None:
            self.body_parser.feed(body)

    def on_message_complete(self):
        self.body_parser.process()
        self.body_parser = None
        self.request.on_body.set()

    def on_chunk_header(self):
        print("on_chunk_header")

    def on_chunk_complete(self):
        print("on_chunk_complete")

    def __finalize_task(self, task):
        if task.cancelled():
            response = task.injector[Response]
            if not response.headers_sent:
                self.loop.create_task(response.begin(503))
        else:
            exc = task.exception()
            if exc is not None:
                self.loop.create_task(handle_error(task.injector, exc))
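
The multipart branch in on_headers_complete hinges on splitting the Content-Type value into its media type and parameters. The import of parse_header is not shown above; the standard library's cgi.parse_header (deprecated since Python 3.11) performs exactly this kind of split, as in this sketch:

from cgi import parse_header  # deprecated in newer Pythons; shown only to illustrate the split

ct, params = parse_header("multipart/form-data; boundary=----FormBoundaryX")
assert ct == "multipart/form-data"
assert params["boundary"] == "----FormBoundaryX"

# the boundary bytes are what a FormDataParser-style body parser needs
boundary = params["boundary"].encode("ASCII")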
Exemplo n.º 23
0
class ProxyProtocol(HttpProtocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'request',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'response_timeout',
        'keep_alive_timeout',
        'request_max_size',
        'request_class',
        'is_request_stream',
        'router',
        # enable or disable access log purpose
        'access_log',
        # connection management
        '_total_request_size',
        '_request_timeout_handler',
        '_response_timeout_handler',
        '_keep_alive_timeout_handler',
        '_last_request_time',
        '_last_response_time',
        '_is_stream_handler',
        '_not_paused')

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections=set(),
                 request_timeout=60,
                 response_timeout=60,
                 keep_alive_timeout=5,
                 request_max_size=None,
                 request_class=None,
                 access_log=True,
                 keep_alive=True,
                 is_request_stream=False,
                 router=None,
                 state=None,
                 debug=False,
                 **kwargs):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.access_log = access_log
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.response_timeout = response_timeout
        self.keep_alive_timeout = keep_alive_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or ProxyRequest
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._not_paused = asyncio.Event(loop=loop)
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b''
        self.state = state if state else {}
        if 'requests_count' not in self.state:
            self.state['requests_count'] = 0
        self._debug = debug
        self._not_paused.set()

        self._raw = asyncio.Queue()  # keep every raw chunk received
        self._is_proxy = False  # for proxy

    def data_received(self, data):
        self._raw.put_nowait(data)  # keep the raw data as well
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)
        if self._is_proxy:  # while proxying, skip parsing entirely
            return
        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserInvalidMethodError as e:  # CONNECT request
            pass
        except HttpParserUpgrade:  # CONNECT request
            pass
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

        self._raw = asyncio.Queue()  # reset the raw-data queue for the proxy
        self._is_proxy = False  # for proxy

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIMultiDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport)
        self.request['_raw'] = self._raw  # hand the raw packets to the request handler
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_message_complete(self):
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body = b''.join(self.request.body)
        if self.request.method == 'CONNECT' or self.request.raw_url.startswith(
                b'http'):
            self._is_proxy = True  # a CONNECT request, or an absolute-URL target, marks a proxy request
        self.execute_request_handler()

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error(
                'Invalid response object for url %s, '
                'Expected Type: HTTPResponse, Actual Type: %s', self.url,
                type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = sanic_server.current_time

                if self.request.method == 'CONNECT':
                    return  # the HTTPS proxy handshake is done; keep the state, the tunnel still needs it
                self.cleanup()
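
The proxy detection in on_message_complete reduces to one check: a CONNECT request, or a request line whose target is an absolute URL rather than a path. A standalone sketch of that predicate:

def is_proxy_request(method: str, raw_url: bytes) -> bool:
    # CONNECT host:port is the HTTPS tunnel handshake;
    # an absolute-form target (http://...) marks a plain HTTP proxy request
    return method == "CONNECT" or raw_url.startswith(b"http")

assert is_proxy_request("CONNECT", b"example.com:443")
assert is_proxy_request("GET", b"http://example.com/index.html")
assert not is_proxy_request("GET", b"/index.html")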
Exemplo n.º 24
0
class HttpProtocol(asyncio.Protocol):
  # http://book.pythontips.com/en/latest/__slots__magic.html
  # def __init__(self, *, loop, request_handler, error_handler, signal, connections, request_timeout) -> None:
  def __init__(self, params: panic_datatypes.ServerParams):
    self.params = params
    self.loop = params.loop
    self.transport = None
    self.request = None
    self.parser = None
    self.url = None
    self.headers = None
    self.signal = params.signal
    self.connections = params.connections
    self.request_handler = params.request_handler
    self.request_timeout = params.request_timeout
    self._total_request_size = 0
    self._timeout_handler = None
    self._last_request_time = None
    self._request_handler_task = None
    self._identity = uuid.uuid4()

  # -------------------------------------------- #
  # Connection
  # -------------------------------------------- #
  def connection_made(self, transport):
    self.connections.add(self)
    self._timeout_handler = self.loop.call_later(self.request_timeout, self.connection_timeout)
    self.transport = transport
    self._last_request_time = datetime.datetime.utcnow()

  def connection_lost(self, exc):
    self.connections.discard(self)
    self._timeout_handler.cancel()
    self.cleanup()

  def connection_timeout(self):
    # Check whether the elapsed time since the last request exceeds the
    # configured request timeout; if not, re-arm the timer for the remainder.
    time_elapsed = datetime.datetime.utcnow() - self._last_request_time
    if time_elapsed.seconds < self.request_timeout:
      time_left = self.request_timeout - time_elapsed.seconds
      self._timeout_handler = self.loop.call_later(time_left, self.connection_timeout)
    else:
      if self._request_handler_task:
        self._request_handler_task.cancel()

      exception = panic_exceptions.RequestTimeout('Request Timeout')
      self.write_error(exception)

  # -------------------------------------------- #
  # Parsing
  # -------------------------------------------- #

  def data_received(self, data):
    # Check for the request itself getting too large and exceeding
    # memory limits
    # TODO: ^
    self._total_request_size += len(data)

    # Create parser if this is the first time we're receiving data
    if self.parser is None:
      assert self.request is None
      self.headers = panic_datatypes.HTTPHeaders()
      self.parser = HttpRequestParser(self)

    # Parse request chunk or close connection
    try:
      self.parser.feed_data(data)
    except HttpParserError as err:
      logger.error(err)
      exception = panic_exceptions.InvalidUsage('Bad Request')
      self.write_error(exception)

  def on_url(self, url):
    self.url = url

  def on_header(self, name, value):
    #if name == b'Content-Length' and int(value) > 1000:
    #  exception = PayloadTooLarge('Payload Too Large')
    #  self.write_error(exception)

    self.headers.append(name.decode(), value.decode('utf-8'))

  def on_headers_complete(self):
    remote_addr = self.transport.get_extra_info('peername')
    if remote_addr:
      self.headers.append(remote_addr[0], str(remote_addr[1]))

    self.request = panic_request.Request(
      url = self.url,
      headers = self.headers,
      version = self.parser.get_http_version(),
      method = panic_datatypes.HTTPMethod.Match(self.parser.get_method().decode())
    )

  def on_body(self, body):
    self.request.body.append(body)

  def on_message_complete(self):
    self._request_handler_task = self.loop.create_task(self.request_handler(self.request, self.write_response))

  # -------------------------------------------- #
  # Responding
  # -------------------------------------------- #

  def write_response(self, response):
    if self.parser:
      keep_alive = self.parser.should_keep_alive() and not self.signal.stopped

    else:
      keep_alive = False

    try:
      self.transport.write(response.output(getattr(self.request, 'version', '1.1')))
    except RuntimeError as err:
      logger.error(err)

    except Exception as err:
      logger.error(err)

    if keep_alive:
      self._last_request_time = datetime.datetime.utcnow()
      self.cleanup()

    else:
      self.transport.close()

  def write_error(self, exception):
    try:
      response = self.params.error_handler(self.request, exception)
      version = self.request.version if self.request else '1.1'
      self.transport.write(response.output(float(version)))
      self.transport.close()
    except panic_exceptions.RequestTimeout:
      exception = panic_exceptions.ServerError('RT')
      exception.status = 408
      response = self.params.error_handler(self.request, exception)
      version = self.request.version if self.request else '1.1'
      self.transport.write(response.output(float(version)))
      self.transport.close()
      #self.write_error(exception)

    except Exception as err:
      logger.exception(err)
      self.bail_out("Writing error failed, connection closed {}".format(err))

  def bail_out(self, message):
    exception = panic_exceptions.ServerError(message)
    self.write_error(exception)
    logger.error(message)

  def cleanup(self):
    self.parser = None
    self.request = None
    self.url = None
    self.headers = None
    self._request_handler_task = None

  def close_if_idle(self):
    """
    Close the connection if a request is not being sent or received
    :return: boolean - True if closed, false if staying open
    """
    if not self.parser:
      self.transport.close()
      return True

    return False
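
write_response above lets the parser decide whether the connection survives the response via should_keep_alive(), which reflects the request's HTTP version and Connection header. A small sketch of that behaviour, as the underlying http-parser semantics define it:

from httptools import HttpRequestParser

class _NoCallbacks:
    # httptools treats all parser callbacks as optional
    pass

def keeps_alive(raw: bytes) -> bool:
    parser = HttpRequestParser(_NoCallbacks())
    parser.feed_data(raw)
    return parser.should_keep_alive()

# HTTP/1.1 defaults to keep-alive unless the client opts out
assert keeps_alive(b"GET / HTTP/1.1\r\nHost: a\r\n\r\n")
assert not keeps_alive(b"GET / HTTP/1.1\r\nHost: a\r\nConnection: close\r\n\r\n")
# HTTP/1.0 defaults to close
assert not keeps_alive(b"GET / HTTP/1.0\r\nHost: a\r\n\r\n")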
Exemplo n.º 25
0
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop', 'transport', 'connections', 'signal',
        # request params
        'parser', 'request', 'url', 'headers',
        # request config
        'request_handler', 'request_timeout', 'response_timeout',
        'keep_alive_timeout', 'request_max_size', 'request_class',
        'is_request_stream', 'router',
        # enable or disable access log purpose
        'access_log',
        # connection management
        '_total_request_size', '_request_timeout_handler',
        '_response_timeout_handler', '_keep_alive_timeout_handler',
        '_last_request_time', '_last_response_time', '_is_stream_handler')

    def __init__(self, *, loop, request_handler, error_handler,
                 signal=Signal(), connections=set(), request_timeout=60,
                 response_timeout=60, keep_alive_timeout=5,
                 request_max_size=None, request_class=None, access_log=True,
                 keep_alive=True, is_request_stream=False, router=None,
                 state=None, debug=False, **kwargs):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.access_log = access_log
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.response_timeout = response_timeout
        self.keep_alive_timeout = keep_alive_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b''
        self.state = state if state else {}
        if 'requests_count' not in self.state:
            self.state['requests_count'] = 0
        self._debug = debug

    @property
    def keep_alive(self):
        return (
            self._keep_alive and
            not self.signal.stopped and
            self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._request_timeout_handler = self.loop.call_later(
            self.request_timeout, self.request_timeout_callback)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()

    def request_timeout_callback(self):
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = (
                self.loop.call_later(time_left,
                                     self.request_timeout_callback)
            )
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise RequestTimeout('Request Timeout')
            except RequestTimeout as exception:
                self.write_error(exception)

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = (
                self.loop.call_later(time_left,
                                     self.response_timeout_callback)
            )
        else:
            try:
                raise ServiceUnavailable('Response Timeout')
            except ServiceUnavailable as exception:
                self.write_error(exception)

    def keep_alive_timeout_callback(self):
        # Check if elapsed time since last response exceeds our configured
        # maximum keep alive timeout value
        time_elapsed = current_time - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = (
                self.loop.call_later(time_left,
                                     self.keep_alive_timeout_callback)
            )
        else:
            logger.info('KeepAlive Timeout. Closing connection.')
            self.transport.close()
            self.transport = None

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode('latin_1')
            self.headers.append(
                    (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b''

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport
        )
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(body))
            return
        self.request.body.append(body)

    def on_message_complete(self):
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body = b''.join(self.request.body)
        self.execute_request_handler()

    def execute_request_handler(self):
        self._response_timeout_handler = self.loop.call_later(
            self.response_timeout, self.response_timeout_callback)
        self._last_request_time = current_time
        self._request_handler_task = self.loop.create_task(
            self.request_handler(
                self.request,
                self.write_response,
                self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def log_response(self, response):
        if self.access_log:
            extra = {
                'status': getattr(response, 'status', 0),
            }

            if isinstance(response, HTTPResponse):
                extra['byte'] = len(response.body)
            else:
                extra['byte'] = -1

            extra['host'] = 'UNKNOWN'
            if self.request is not None:
                if self.request.ip:
                    extra['host'] = '{0[0]}:{0[1]}'.format(self.request.ip)

                extra['request'] = '{0} {1}'.format(self.request.method,
                                                    self.request.url)
            else:
                extra['request'] = 'nil'

            access_logger.info('', extra=extra)

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(
                    self.request.version, keep_alive,
                    self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error('Invalid response object for url %s, '
                         'Expected Type: HTTPResponse, Actual Type: %s',
                         self.url, type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout,
                    self.keep_alive_timeout_callback)
                self._last_response_time = current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream(
                self.request.version, keep_alive, self.keep_alive_timeout)
            self.log_response(response)
        except AttributeError:
            logger.error('Invalid response object for url %s, '
                         'Expected Type: HTTPResponse, Actual Type: %s',
                         self.url, type(response))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before response written @ %s',
                             self.request.ip)
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout,
                    self.keep_alive_timeout_callback)
                self._last_response_time = current_time
                self.cleanup()

    def write_error(self, exception):
        # An error _is_ a response.
        # Don't throw a response timeout, when a response _is_ given.
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            if self._debug:
                logger.error('Connection lost before error written @ %s',
                             self.request.ip if self.request else 'Unknown')
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(
                    repr(e)), from_error=True
            )
        finally:
            if self.parser and (self.keep_alive
                                or getattr(response, 'status', 0) == 408):
                self.log_response(response)
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            logger.error("Transport closed @ %s and exception "
                         "experienced during error handling",
                         self.transport.get_extra_info('peername'))
            logger.debug('Exception:\n%s', traceback.format_exc())
        else:
            exception = ServerError(message)
            self.write_error(exception)
            logger.error(message)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
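
For streaming handlers, the contract established by on_body and on_message_complete above is an asyncio.Queue of body chunks terminated by a None sentinel. A hypothetical streaming handler consuming request.stream could look like this:

async def streaming_handler(request, write_response, stream_response):
    # hypothetical handler: drain request.stream until the None sentinel
    chunks = []
    while True:
        chunk = await request.stream.get()
        if chunk is None:  # pushed by on_message_complete
            break
        chunks.append(chunk)
    body = b"".join(chunks)
    # ... build a response from `body` and hand it to write_response/stream_response ...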
Exemplo n.º 26
0
class BaseHTTPRequestHandler(BaseRequestHandler):
    request_klass = HTTPRequest
    response_klass = HTTPResponse

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.parser = HttpRequestParser(self)

        self._url: bytes = b""
        self._headers: tp.Dict[bytes, bytes] = {}
        self._body: bytes = b""
        self._parsed = False

    def handle(self) -> None:
        request = self.parse_request()
        if request:
            try:
                response = self.handle_request(request)
            except Exception:
                print("Ooops... Error 500")
                response = self.response_klass(status=500, headers={}, body=b"")
        else:
            response = self.response_klass(status=400, headers={}, body=b"")
        self.handle_response(response)
        self.close()

    def parse_request(self) -> tp.Optional[HTTPRequest]:
        while not self._parsed:
            try:
                data = self.socket.recv(1024)
                if data == b"":
                    print("Ok. Parsed done successfully")
                    break
                self.parser.feed_data(data)
            except socket.timeout:
                print("Ooops... Timeout!")
                break
            except (
                HttpParserError,
                HttpParserCallbackError,
                HttpParserInvalidStatusError,
                HttpParserInvalidMethodError,
                HttpParserInvalidURLError,
                HttpParserUpgrade,
            ):
                print("Ooops... Parser error")
                break
        if self._parsed:
            return self.request_klass(
                method=self.parser.get_method(),
                url=self._url,
                headers=self._headers,
                body=self._body,
            )
        return None

    def handle_request(self, request: HTTPRequest) -> HTTPResponse:
        return self.response_klass(status=405, headers={}, body=b"")

    def handle_response(self, response: HTTPResponse) -> None:
        self.socket.sendall(response.to_http1())

    def on_url(self, url: bytes) -> None:
        self._url = url

    def on_header(self, name: bytes, value: bytes) -> None:
        self._headers[name] = value

    def on_body(self, body: bytes) -> None:
        self._body = body

    def on_message_complete(self) -> None:
        self._parsed = True
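
handle_response relies on HTTPResponse.to_http1(), which is not shown here. Assuming it only needs a status line, headers, and a body, a minimal serializer in that spirit could be sketched as:

REASONS = {200: "OK", 400: "Bad Request", 405: "Method Not Allowed", 500: "Internal Server Error"}

def to_http1(status: int, headers: dict, body: bytes) -> bytes:
    # an explicit Content-Length keeps the framing unambiguous
    lines = ["HTTP/1.1 {} {}".format(status, REASONS.get(status, "Unknown"))]
    lines += ["{}: {}".format(name, value) for name, value in headers.items()]
    lines.append("Content-Length: {}".format(len(body)))
    return ("\r\n".join(lines) + "\r\n\r\n").encode("ascii") + body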
Exemplo n.º 27
0
class HttpProtocol(asyncio.Protocol):
    """
    This class provides a basic HTTP implementation of the sanic framework.
    """

    __slots__ = (
        # app
        "app",
        # event loop, connection
        "loop",
        "transport",
        "connections",
        "signal",
        # request params
        "parser",
        "request",
        "url",
        "headers",
        # request config
        "request_handler",
        "request_timeout",
        "response_timeout",
        "keep_alive_timeout",
        "request_max_size",
        "request_buffer_queue_size",
        "request_class",
        "is_request_stream",
        "router",
        "error_handler",
        # enable or disable access log purpose
        "access_log",
        # connection management
        "_total_request_size",
        "_request_timeout_handler",
        "_response_timeout_handler",
        "_keep_alive_timeout_handler",
        "_last_request_time",
        "_last_response_time",
        "_is_stream_handler",
        "_not_paused",
        "_request_handler_task",
        "_request_stream_task",
        "_keep_alive",
        "_header_fragment",
        "state",
        "_debug",
    )

    def __init__(self,
                 *,
                 loop,
                 app,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections=None,
                 request_timeout=60,
                 response_timeout=60,
                 keep_alive_timeout=5,
                 request_max_size=None,
                 request_buffer_queue_size=100,
                 request_class=None,
                 access_log=True,
                 keep_alive=True,
                 is_request_stream=False,
                 router=None,
                 state=None,
                 debug=False,
                 **kwargs):
        self.loop = loop
        self.app = app
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.access_log = access_log
        self.connections = connections if connections is not None else set()
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_buffer_queue_size = request_buffer_queue_size
        self.response_timeout = response_timeout
        self.keep_alive_timeout = keep_alive_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._not_paused = asyncio.Event(loop=loop)
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b""
        self.state = state if state else {}
        if "requests_count" not in self.state:
            self.state["requests_count"] = 0
        self._debug = debug
        self._not_paused.set()

    @property
    def keep_alive(self):
        """
        Check if the connection needs to be kept alive based on the params
        attached to the `_keep_alive` attribute, :attr:`Signal.stopped`
        and :func:`HttpProtocol.parser.should_keep_alive`

        :return: ``True`` if connection is to be kept alive ``False`` else
        """
        return (self._keep_alive and not self.signal.stopped
                and self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._request_timeout_handler = self.loop.call_later(
            self.request_timeout, self.request_timeout_callback)
        self.transport = transport
        self._last_request_time = time()

    def connection_lost(self, exc):
        self.connections.discard(self)
        if self._request_handler_task:
            self._request_handler_task.cancel()
        if self._request_stream_task:
            self._request_stream_task.cancel()
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()

    def pause_writing(self):
        self._not_paused.clear()

    def resume_writing(self):
        self._not_paused.set()

    def request_timeout_callback(self):
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = time() - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = self.loop.call_later(
                time_left, self.request_timeout_callback)
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(RequestTimeout("Request Timeout"))

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = time() - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = self.loop.call_later(
                time_left, self.response_timeout_callback)
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(ServiceUnavailable("Response Timeout"))

    def keep_alive_timeout_callback(self):
        """
        Check if elapsed time since last response exceeds our configured
        maximum keep alive timeout value and if so, close the transport
        pipe and let the response writer handle the error.

        :return: None
        """
        time_elapsed = time() - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = self.loop.call_later(
                time_left, self.keep_alive_timeout_callback)
        else:
            logger.debug("KeepAlive Timeout. Closing connection.")
            self.transport.close()
            self.transport = None

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            self.write_error(PayloadTooLarge("Payload Too Large"))

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state["requests_count"] = self.state["requests_count"] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = "Bad Request"
            if self._debug:
                message += "\n" + traceback.format_exc()
            self.write_error(InvalidUsage(message))

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if (self._header_fragment == b"Content-Length"
                    and int(value) > self.request_max_size):
                self.write_error(PayloadTooLarge("Payload Too Large"))
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode("latin_1")
            self.headers.append(
                (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b""

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=Header(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport,
            app=self.app,
        )
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None

        if self.request.headers.get(EXPECT_HEADER):
            self.expect_handler()

        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = StreamBuffer(
                    self.request_buffer_queue_size)
                self.execute_request_handler()

    def expect_handler(self):
        """
        Handler for Expect Header.
        """
        expect = self.request.headers.get(EXPECT_HEADER)
        if self.request.version == "1.1":
            if expect.lower() == "100-continue":
                self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
            else:
                self.write_error(
                    HeaderExpectationFailed(
                        "Unknown Expect: {expect}".format(expect=expect)))

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.body_append(body))
        else:
            self.request.body_push(body)

    async def body_append(self, body):
        if self.request.stream.is_full():
            self.transport.pause_reading()
            await self.request.stream.put(body)
            self.transport.resume_reading()
        else:
            await self.request.stream.put(body)

    def on_message_complete(self):
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body_finish()
        self.execute_request_handler()

    def execute_request_handler(self):
        """
        Invoke the request handler defined by the
        :func:`sanic.app.Sanic.handle_request` method

        :return: None
        """
        self._response_timeout_handler = self.loop.call_later(
            self.response_timeout, self.response_timeout_callback)
        self._last_request_time = time()
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response,
                                 self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def log_response(self, response):
        """
        Helper method to log responses when :attr:`HttpProtocol.access_log`
        is enabled.

        :param response: Response generated for the current request

        :type response: :class:`sanic.response.HTTPResponse` or
            :class:`sanic.response.StreamingHTTPResponse`

        :return: None
        """
        if self.access_log:
            extra = {"status": getattr(response, "status", 0)}

            if isinstance(response, HTTPResponse):
                extra["byte"] = len(response.body)
            else:
                extra["byte"] = -1

            extra["host"] = "UNKNOWN"
            if self.request is not None:
                if self.request.ip:
                    extra["host"] = "{0}:{1}".format(self.request.ip,
                                                     self.request.port)

                extra["request"] = "{0} {1}".format(self.request.method,
                                                    self.request.url)
            else:
                extra["request"] = "nil"

            access_logger.info("", extra=extra)

    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = time()
                self.cleanup()

    async def drain(self):
        await self._not_paused.wait()

    async def push_data(self, data):
        self.transport.write(data)

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None

        try:
            keep_alive = self.keep_alive
            response.protocol = self
            await response.stream(self.request.version, keep_alive,
                                  self.keep_alive_timeout)
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = time()
                self.cleanup()

    def write_error(self, exception):
        # An error _is_ a response.
        # Don't throw a response timeout, when a response _is_ given.
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else "1.1"
            self.transport.write(response.output(version))
        except RuntimeError:
            if self._debug:
                logger.error(
                    "Connection lost before error written @ %s",
                    self.request.ip if self.request else "Unknown",
                )
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(repr(e)),
                from_error=True,
            )
        finally:
            if self.parser and (self.keep_alive
                                or getattr(response, "status", 0) == 408):
                self.log_response(response)
            try:
                self.transport.close()
            except AttributeError:
                logger.debug("Connection lost before server could close it.")

    def bail_out(self, message, from_error=False):
        """
        If the transport pipe is closed and the sanic app encounters an error
        while writing data to it, log the error with proper details.

        :param message: Error message to display
        :param from_error: If the bail out was invoked while handling an
            exception scenario.

        :type message: str
        :type from_error: bool

        :return: None
        """
        if from_error or self.transport is None or self.transport.is_closing():
            logger.error(
                "Transport closed @ %s and exception "
                "experienced during error handling",
                (self.transport.get_extra_info("peername")
                 if self.transport is not None else "N/A"),
            )
            logger.debug("Exception:", exc_info=True)
        else:
            self.write_error(ServerError(message))
            logger.error(message)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
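
These protocol classes are plain asyncio.Protocol subclasses, so serving them is a matter of handing a protocol factory to loop.create_server. A minimal sketch, assuming a constructor like the one in the next example (loop and app keyword arguments) and a hypothetical app object that carries the config attributes the protocol reads:

import asyncio
from functools import partial

def serve(app, host="127.0.0.1", port=8000):
    # Hypothetical wiring: `app` is assumed to expose the config values the
    # protocol constructor reads (REQUEST_TIMEOUT, KEEP_ALIVE, ...).
    loop = asyncio.get_event_loop()
    coro = loop.create_server(
        partial(HttpProtocol, loop=loop, app=app),  # one protocol instance per connection
        host,
        port,
    )
    server = loop.run_until_complete(coro)
    try:
        loop.run_forever()
    finally:
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.close()
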
Exemplo n.º 28
0
class HttpProtocol(asyncio.Protocol):
    """
    This class provides a basic HTTP implementation of the web framework.
    """

    __slots__ = (
        # app
        "app",
        # event loop, connection
        "loop",
        "transport",
        "connections",
        "signal",
        # request params
        "parser",
        "request",
        "url",
        "headers",
        # request config
        "request_handler",
        "request_timeout",
        "response_timeout",
        "keep_alive_timeout",
        "request_max_size",
        "request_buffer_queue_size",
        "request_class",
        "is_request_stream",
        "error_handler",
        # enable or disable access log purpose
        "access_log",
        # connection management
        "_total_request_size",
        "_request_timeout_handler",
        "_response_timeout_handler",
        "_keep_alive_timeout_handler",
        "_last_request_time",
        "_last_response_time",
        "_is_stream_handler",
        "_not_paused",
        "_request_handler_task",
        "_request_stream_task",
        "_keep_alive",
        "_header_fragment",
        "state",
        "_body_chunks",
    )

    def __init__(
            self,
            *,
            loop,
            app,
            signal=Signal(),
            connections=None,
            state=None,
            **kwargs,
    ):
        asyncio.set_event_loop(loop)
        self.loop = loop
        deprecated_loop = self.loop if sys.version_info < (3, 7) else None
        self.app = app
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = signal
        self.access_log = self.app.config.ACCESS_LOG
        self.connections = connections if connections is not None else set()
        self.request_handler = self.app.handle_request
        self.error_handler = self.app.error_handler
        self.request_timeout = self.app.config.REQUEST_TIMEOUT
        self.request_buffer_queue_size = (
            self.app.config.REQUEST_BUFFER_QUEUE_SIZE)
        self.response_timeout = self.app.config.RESPONSE_TIMEOUT
        self.keep_alive_timeout = self.app.config.KEEP_ALIVE_TIMEOUT
        self.request_max_size = self.app.config.REQUEST_MAX_SIZE
        self.request_class = self.app.request_class or Request
        self.is_request_stream = self.app.is_request_stream
        self._is_stream_handler = False
        self._not_paused = asyncio.Event(loop=deprecated_loop)
        self._total_request_size = 0
        self._request_timeout_handler = None
        self._response_timeout_handler = None
        self._keep_alive_timeout_handler = None
        self._last_request_time = None
        self._last_response_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = self.app.config.KEEP_ALIVE
        self._header_fragment = b""
        self.state = state if state else {}
        if "requests_count" not in self.state:
            self.state["requests_count"] = 0
        # Set the Event's internal flag to true (writing is not paused)
        self._not_paused.set()
        self._body_chunks = deque()

    @property
    def keep_alive(self):
        """
        Check if the connection needs to be kept alive based on the params
        attached to the `_keep_alive` attribute, :attr:`Signal.stopped`
        and :func:`HttpProtocol.parser.should_keep_alive`

        :return: ``True`` if the connection should be kept alive, ``False`` otherwise
        """
        return (self._keep_alive and not self.signal.stopped
                and self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        """创建连接时执行 ."""
        self.connections.add(self)
        # Schedule the request timeout handler
        self._request_timeout_handler = self.loop.call_later(
            self.request_timeout, self.request_timeout_callback)
        self.transport = transport
        # Record when the request started
        self._last_request_time = time()

    def connection_lost(self, exc):
        """连接丢失时执行 ."""
        self.connections.discard(self)
        if self._request_handler_task:
            self._request_handler_task.cancel()
        if self._request_stream_task:
            self._request_stream_task.cancel()
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()

    def pause_writing(self):
        self._not_paused.clear()

    def resume_writing(self):
        self._not_paused.set()

    def request_timeout_callback(self):
        """请求超时处理函数 ."""
        # See the docstring in the RequestTimeout exception, to see
        # exactly what this timeout is checking for.
        # Check if elapsed time since request initiated exceeds our
        # configured maximum request timeout value
        time_elapsed = time() - self._last_request_time
        # Check whether the request has timed out.
        # If not, schedule another check for the remaining time.
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._request_timeout_handler = self.loop.call_later(
                time_left, self.request_timeout_callback)
        else:
            # Timed out: cancel the request stream task
            if self._request_stream_task:
                self._request_stream_task.cancel()
            # Timed out: cancel the request handler task
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(RequestTimeout("Request Timeout"))

    def response_timeout_callback(self):
        # Check if elapsed time since response was initiated exceeds our
        # configured maximum response timeout value
        time_elapsed = time() - self._last_request_time
        if time_elapsed < self.response_timeout:
            time_left = self.response_timeout - time_elapsed
            self._response_timeout_handler = self.loop.call_later(
                time_left, self.response_timeout_callback)
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            self.write_error(ServiceUnavailable("Response Timeout"))

    def keep_alive_timeout_callback(self):
        """
        Check if elapsed time since last response exceeds our configured
        maximum keep alive timeout value and if so, close the transport
        pipe and let the response writer handle the error.

        :return: None
        """
        time_elapsed = time() - self._last_response_time
        if time_elapsed < self.keep_alive_timeout:
            time_left = self.keep_alive_timeout - time_elapsed
            self._keep_alive_timeout_handler = self.loop.call_later(
                time_left, self.keep_alive_timeout_callback)
        else:
            logger.debug("KeepAlive Timeout. Closing connection.")
            # Close the connection
            self.transport.close()
            self.transport = None

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        """接受到HTTP请求时调用 ."""
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        # Limit the total size of the request
        if self._total_request_size > self.request_max_size:
            self.write_error(PayloadTooLarge("Payload Too Large"))

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state["requests_count"] = self.state["requests_count"] + 1

        # Parse request chunk or close connection
        try:
            # Parse the HTTP data
            self.parser.feed_data(data)
        except HttpParserError:
            # If the data is not valid HTTP, respond with a 400 error
            message = "Bad Request"
            if self.app.debug:
                message += "\n" + traceback.format_exc()
            self.write_error(InvalidUsage(message))

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if (self._header_fragment == b"Content-Length"
                    and int(value) > self.request_max_size):
                self.write_error(PayloadTooLarge("Payload Too Large"))
            try:
                value = value.decode()
            except UnicodeDecodeError:
                value = value.decode("latin_1")
            self.headers.append(
                (self._header_fragment.decode().casefold(), value))

            self._header_fragment = b""

    def on_headers_complete(self):
        """在服务器接收到头部后,创建一个请求对象 ."""
        self.request = self.request_class(
            url_bytes=self.url,
            headers=Header(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport,
            app=self.app,
        )
        # Remove any existing KeepAlive handler here,
        # It will be recreated if required on the new request.
        if self._keep_alive_timeout_handler:
            self._keep_alive_timeout_handler.cancel()
            self._keep_alive_timeout_handler = None

        if self.request.headers.get(EXPECT_HEADER):
            self.expect_handler()

        if self.is_request_stream:
            self._is_stream_handler = self.app.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = StreamBuffer(
                    self.request_buffer_queue_size)
                self.execute_request_handler()

    def expect_handler(self):
        """
        Handler for Expect Header.
        """
        expect = self.request.headers.get(EXPECT_HEADER)
        if self.request.version == "1.1":
            if expect.lower() == "100-continue":
                self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
            else:
                self.write_error(
                    HeaderExpectationFailed(f"Unknown Expect: {expect}"))

    def on_body(self, body):
        """服务器接收到body后,将其存放到请求对象中 ."""
        if self.is_request_stream and self._is_stream_handler:
            # Body chunks can be put into the asyncio.Queue out of order if
            # multiple tasks put concurrently while the queue is full (Python
            # 3.7), so we should not create more than one task putting into
            # the queue at a time.
            self._body_chunks.append(body)
            if (not self._request_stream_task
                    or self._request_stream_task.done()):
                self._request_stream_task = self.loop.create_task(
                    self.stream_append())
        else:
            self.request.body_push(body)

    async def body_append(self, body):
        if (self.request is None or self._request_stream_task is None
                or self._request_stream_task.cancelled()):
            return

        if self.request.stream.is_full():
            self.transport.pause_reading()
            await self.request.stream.put(body)
            self.transport.resume_reading()
        else:
            await self.request.stream.put(body)

    async def stream_append(self):
        while self._body_chunks:
            body = self._body_chunks.popleft()
            if self.request.stream.is_full():
                self.transport.pause_reading()
                await self.request.stream.put(body)
                self.transport.resume_reading()
            else:
                await self.request.stream.put(body)

    def on_message_complete(self):
        """服务器收到全部消息 ."""
        # Entire request (headers and whole body) is received.
        # We can cancel and remove the request timeout handler now.
        if self._request_timeout_handler:
            self._request_timeout_handler.cancel()
            self._request_timeout_handler = None
        if self.is_request_stream and self._is_stream_handler:
            self._body_chunks.append(None)
            if (not self._request_stream_task
                    or self._request_stream_task.done()):
                self._request_stream_task = self.loop.create_task(
                    self.stream_append())
            return
        # The full request has arrived; attach the complete body to the request object
        self.request.body_finish()
        # Run the request handler
        self.execute_request_handler()

    def execute_request_handler(self):
        """
        Invoke the request handler defined by the
        :func:`sanic.app.Sanic.handle_request` method

        :return: None
        """
        self._response_timeout_handler = self.loop.call_later(
            self.response_timeout, self.response_timeout_callback)
        self._last_request_time = time()
        # Create the task that runs the request handler
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response,
                                 self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def log_response(self, response):
        """
        Helper method to log responses when :attr:`HttpProtocol.access_log`
        is enabled.

        :param response: Response generated for the current request

        :type response: :class:`sanic.response.HTTPResponse` or
            :class:`sanic.response.StreamingHTTPResponse`

        :return: None
        """
        if self.access_log:
            extra = {"status": getattr(response, "status", 0)}

            if isinstance(response, HTTPResponse):
                extra["byte"] = len(response.body)
            else:
                extra["byte"] = -1

            extra["host"] = "UNKNOWN"
            if self.request is not None:
                if self.request.ip:
                    extra["host"] = f"{self.request.ip}:{self.request.port}"

                extra["request"] = f"{self.request.method} {self.request.url}"
            else:
                extra["request"] = "nil"

            access_logger.info("", extra=extra)

    def write_response(self, response):
        """处理HTTP响应
        Writes response content synchronously to the transport.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.keep_alive_timeout))
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self.app.debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(f"Writing response failed, connection closed {e!r}")
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = time()
                self.cleanup()

    async def drain(self):
        await self._not_paused.wait()

    async def push_data(self, data):
        self.transport.write(data)

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None

        try:
            keep_alive = self.keep_alive
            response.protocol = self
            await response.stream(self.request.version, keep_alive,
                                  self.keep_alive_timeout)
            self.log_response(response)
        except AttributeError:
            logger.error(
                "Invalid response object for url %s, "
                "Expected Type: HTTPResponse, Actual Type: %s",
                self.url,
                type(response),
            )
            self.write_error(ServerError("Invalid response type"))
        except RuntimeError:
            if self.app.debug:
                logger.error(
                    "Connection lost before response written @ %s",
                    self.request.ip,
                )
            keep_alive = False
        except Exception as e:
            self.bail_out(f"Writing response failed, connection closed {e!r}")
        finally:
            if not keep_alive:
                self.transport.close()
                self.transport = None
            else:
                self._keep_alive_timeout_handler = self.loop.call_later(
                    self.keep_alive_timeout, self.keep_alive_timeout_callback)
                self._last_response_time = time()
                self.cleanup()

    def write_error(self, exception):
        # An error _is_ a response.
        # Don't throw a response timeout, when a response _is_ given.
        if self._response_timeout_handler:
            self._response_timeout_handler.cancel()
            self._response_timeout_handler = None
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else "1.1"
            self.transport.write(response.output(version))
        except RuntimeError:
            if self.app.debug:
                logger.error(
                    "Connection lost before error written @ %s",
                    self.request.ip if self.request else "Unknown",
                )
        except Exception as e:
            self.bail_out(
                f"Writing error failed, connection closed {e!r}",
                from_error=True,
            )
        finally:
            if self.parser and (self.keep_alive
                                or getattr(response, "status", 0) == 408):
                self.log_response(response)
            try:
                self.transport.close()
            except AttributeError:
                logger.debug("Connection lost before server could close it.")

    def bail_out(self, message, from_error=False):
        """
        If the transport pipe is closed and the sanic app encounters an error
        while writing data to it, log the error with proper details.

        :param message: Error message to display
        :param from_error: If the bail out was invoked while handling an
            exception scenario.

        :type message: str
        :type from_error: bool

        :return: None
        """
        if from_error or self.transport is None or self.transport.is_closing():
            logger.error(
                "Transport closed @ %s and exception "
                "experienced during error handling",
                (self.transport.get_extra_info("peername")
                 if self.transport is not None else "N/A"),
            )
            logger.debug("Exception:", exc_info=True)
        else:
            self.write_error(ServerError(message))
            logger.error(message)

    def cleanup(self):
        """This is called when KeepAlive feature is used,
        it resets the connection in order for it to be able
        to handle receiving another request on the same connection."""
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser and self.transport is not None:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
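
The on_body / stream_append pair above buffers body chunks in a deque and keeps at most one draining task alive, so chunks reach the stream queue in order even when the queue is full. A standalone sketch of that single-consumer pattern (ChunkFeeder is a made-up name, not part of the example):

import asyncio
from collections import deque

class ChunkFeeder:
    def __init__(self, maxsize=2):
        self.queue = asyncio.Queue(maxsize=maxsize)
        self.chunks = deque()
        self._drain_task = None

    def feed(self, chunk):
        # Called synchronously (e.g. from a parser callback): buffer the
        # chunk and make sure exactly one drain task is running.
        self.chunks.append(chunk)
        if self._drain_task is None or self._drain_task.done():
            self._drain_task = asyncio.ensure_future(self._drain())

    async def _drain(self):
        # Single consumer: chunk order is preserved even when put() blocks
        # because the queue is full.
        while self.chunks:
            await self.queue.put(self.chunks.popleft())

async def main():
    feeder = ChunkFeeder()
    for part in (b"a", b"b", b"c", None):  # None marks the end of the body
        feeder.feed(part)
    while True:
        chunk = await feeder.queue.get()
        if chunk is None:
            break
        print(chunk)

asyncio.get_event_loop().run_until_complete(main())
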
Exemplo n.º 29
0
class Channel:

    __slots__ = (
        'parser',
        'request',
        'complete',
        'headers_complete',
        'socket',
        'reader',
    )

    def __init__(self, socket):
        self.complete = False
        self.headers_complete = False
        self.parser = HttpRequestParser(self)
        self.request = None
        self.socket = socket
        self.reader = self._reader()

    def data_received(self, data: bytes):
        try:
            self.parser.feed_data(data)
        except HttpParserUpgrade:
            self.request.upgrade = True
        except (HttpParserError, HttpParserInvalidMethodError) as exc:
            # We should log the exc.
            raise HTTPError(
                HTTPStatus.BAD_REQUEST, 'Unparsable request.')

    async def read(self, parse: bool=True) -> bytes:
        data = await self.socket.recv(1024)
        if data:
            if parse:
                self.data_received(data)
            return data

    async def _reader(self) -> bytes:
        while not self.complete:
            data = await self.read()
            if not data:
                break
            yield data

    async def _drainer(self) -> bytes:
        while True:
            data = await self.read(parse=False)
            if not data:
                break
            yield data

    def on_header(self, name: bytes, value: bytes):
        value = value.decode()
        if value:
            name = name.decode().title()
            if name in self.request.headers:
                self.request.headers[name] += ', {}'.format(value)
            else:
                self.request.headers[name] = value

    def on_body(self, data: bytes):
        self.request.body += data

    def on_message_begin(self):
        self.complete = False
        self.request = Request(self.socket, self.reader)

    def on_message_complete(self):
        self.complete = True

    def on_url(self, url: bytes):
        self.request.url = url
        parsed = parse_url(url)
        self.request.path = unquote(parsed.path.decode())
        self.request.query_string = (parsed.query or b'').decode()

    def on_headers_complete(self):
        self.request.keep_alive = self.parser.should_keep_alive()
        self.request.method = self.parser.get_method().decode().upper()
        self.headers_complete = True

    async def __aiter__(self):
        keep_alive = True
        while keep_alive:
            data = await self.read()
            if data is None:
                break
            if self.headers_complete:
                yield self.request
                keep_alive = self.request.keep_alive
                if keep_alive:
                    if not self.complete:
                        await self.reader.aclose()
                        # We drain if there's an incomplete request.
                        async for _ in self._drainer():
                            pass
                    self.request = None
                    self.complete = False
                    self.headers_complete = False
                    self.reader = self._reader()
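
Every class in this listing relies on the same callback protocol from httptools: constructing HttpRequestParser(self) and calling feed_data() drives on_url, on_header, on_headers_complete, on_body and on_message_complete on the object the parser was given. A small standalone trace (TraceProtocol is a made-up name) makes that order visible:

from httptools import HttpRequestParser

class TraceProtocol:
    # Each callback is optional; httptools invokes the ones that exist.
    def on_url(self, url):
        print("url:", url)
    def on_header(self, name, value):
        print("header:", name, value)
    def on_headers_complete(self):
        print("headers complete")
    def on_body(self, body):
        print("body:", body)
    def on_message_complete(self):
        print("message complete")

proto = TraceProtocol()
parser = HttpRequestParser(proto)
parser.feed_data(
    b"POST /items HTTP/1.1\r\n"
    b"Host: example.com\r\n"
    b"Content-Length: 5\r\n"
    b"\r\n"
    b"hello"
)
print(parser.get_method(), parser.get_http_version())
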
Exemplo n.º 30
0
class Protocol(asyncio.Protocol):
    """Responsible of parsing the request and writing the response.

    You can subclass it to set your own `Query`, `Request` or `Response`
    classes.
    """

    __slots__ = ('app', 'request', 'parser', 'response', 'writer')
    Query = Query
    Request = Request
    Response = Response

    def __init__(self, app):
        self.app = app
        self.parser = HttpRequestParser(self)

    def data_received(self, data: bytes):
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            # If the parsing failed before on_message_begin, we don't have a
            # response.
            self.response = Response()
            self.response.status = HTTPStatus.BAD_REQUEST
            self.response.body = b'Unparsable request'
            self.write()

    def connection_made(self, transport):
        self.writer = transport

    # All on_xxx methods are in use by httptools parser.
    # See https://github.com/MagicStack/httptools#apis
    def on_header(self, name: bytes, value: bytes):
        self.request.headers[name.decode()] = value.decode()

    def on_body(self, body: bytes):
        self.request.body += body

    def on_url(self, url: bytes):
        self.request.url = url
        parsed = parse_url(url)
        self.request.path = parsed.path.decode()
        self.request.query_string = (parsed.query or b'').decode()
        parsed_qs = parse_qs(self.request.query_string, keep_blank_values=True)
        self.request.query = self.Query(parsed_qs)

    def on_message_begin(self):
        self.request = self.Request()
        self.response = self.Response()

    def on_message_complete(self):
        self.request.method = self.parser.get_method().decode().upper()
        task = self.app.loop.create_task(self.app(self.request, self.response))
        task.add_done_callback(self.write)

    # May or may not have "future" as arg.
    def write(self, *args):
        # Append to a single bytes payload for performance.
        payload = b'HTTP/1.1 %a %b\r\n' % (
            self.response.status.value, self.response.status.phrase.encode())
        if not isinstance(self.response.body, bytes):
            self.response.body = self.response.body.encode()
        if 'Content-Length' not in self.response.headers:
            length = len(self.response.body)
            self.response.headers['Content-Length'] = str(length)
        for key, value in self.response.headers.items():
            payload += b'%b: %b\r\n' % (key.encode(), str(value).encode())
        payload += b'\r\n%b' % self.response.body
        self.writer.write(payload)
        if not self.parser.should_keep_alive():
            self.writer.close()
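
The write() method above serializes the response by hand: status line, headers with a computed Content-Length, a blank line, then the body. A standalone sketch of that serialization (serialize is a hypothetical helper, not part of the framework above):

from http import HTTPStatus

def serialize(status, headers, body):
    # Build the raw HTTP/1.1 response bytes: status line, headers, blank
    # line, body. Content-Length is filled in when the caller omits it.
    headers = dict(headers)
    headers.setdefault("Content-Length", str(len(body)))
    payload = b"HTTP/1.1 %d %s\r\n" % (status.value, status.phrase.encode())
    for key, value in headers.items():
        payload += b"%s: %s\r\n" % (key.encode(), str(value).encode())
    return payload + b"\r\n" + body

print(serialize(HTTPStatus.OK, {"Content-Type": "text/plain"}, b"hi"))
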
Exemplo n.º 31
0
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'request',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'request_max_size',
        'request_class',
        'is_request_stream',
        'router',
        # enable or disable access log / error log purpose
        'has_log',
        # connection management
        '_total_request_size',
        '_timeout_handler',
        '_last_communication_time',
        '_is_stream_handler')

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections=set(),
                 request_timeout=60,
                 request_max_size=None,
                 request_class=None,
                 has_log=True,
                 keep_alive=True,
                 is_request_stream=False,
                 router=None,
                 state=None,
                 debug=False,
                 **kwargs):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.has_log = has_log
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b''
        self.state = state if state else {}
        if 'requests_count' not in self.state:
            self.state['requests_count'] = 0
        self._debug = debug

    @property
    def keep_alive(self):
        return (self._keep_alive and not self.signal.stopped
                and self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(self.request_timeout,
                                                     self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()

    def connection_timeout(self):
        # Check if elapsed time since the request was initiated exceeds our
        # configured request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = (self.loop.call_later(
                time_left, self.connection_timeout))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            exception = RequestTimeout('Request Timeout')
            self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)

            self.headers.append(
                (self._header_fragment.decode().casefold(), value.decode()))

            self._header_fragment = b''

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport)
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(body))
            return
        self.request.body.append(body)

    def on_message_complete(self):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body = b''.join(self.request.body)
        self.execute_request_handler()

    def execute_request_handler(self):
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response,
                                 self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.request_timeout))
            if self.has_log:
                netlog.info('', extra={
                    'status': response.status,
                    'byte': len(response.body),
                    'host': '{0}:{1}'.format(self.request.ip[0],
                                             self.request.ip[1]),
                    'request': '{0} {1}'.format(self.request.method,
                                                self.request.url)
                })
        except AttributeError:
            log.error(('Invalid response object for url {}, '
                       'Expected Type: HTTPResponse, Actual Type: {}').format(
                           self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error('Connection lost before response written @ {}'.format(
                self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """

        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream(self.request.version, keep_alive,
                                  self.request_timeout)
            if self.has_log:
                netlog.info('', extra={
                    'status': response.status,
                    'byte': -1,
                    'host': '{0}:{1}'.format(self.request.ip[0],
                                             self.request.ip[1]),
                    'request': '{0} {1}'.format(self.request.method,
                                                self.request.url)
                })
        except AttributeError:
            log.error(('Invalid response object for url {}, '
                       'Expected Type: HTTPResponse, Actual Type: {}').format(
                           self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error('Connection lost before response written @ {}'.format(
                self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    def write_error(self, exception):
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            log.error('Connection lost before error written @ {}'.format(
                self.request.ip if self.request else 'Unknown'))
        except Exception as e:
            self.bail_out("Writing error failed, connection closed {}".format(
                repr(e)),
                          from_error=True)
        finally:
            if self.has_log:
                extra = {
                    'status': response.status,
                    'host': '',
                    'request': str(self.request) + str(self.url)
                }
                if response and isinstance(response, HTTPResponse):
                    extra['byte'] = len(response.body)
                else:
                    extra['byte'] = -1
                if self.request:
                    extra['host'] = '%s:%d' % self.request.ip
                    extra['request'] = '%s %s' % (self.request.method,
                                                  self.url)
                netlog.info('', extra=extra)
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            log.error(("Transport closed @ {} and exception "
                       "experienced during error handling").format(
                           self.transport.get_extra_info('peername')))
            log.debug('Exception:\n{}'.format(traceback.format_exc()))
        else:
            exception = ServerError(message)
            self.write_error(exception)
            log.error(message)

    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
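
The variant above and the one that follows compare against a module-level current_time instead of calling time() on every event; that value is assumed to be refreshed periodically elsewhere. A hedged sketch of such an updater, rescheduling itself with loop.call_later (update_current_time is an assumption and is not shown in these examples):

from time import time

current_time = time()

def update_current_time(loop):
    # Refresh the module-level timestamp roughly once per second so the
    # timeout callbacks can read it cheaply.
    global current_time
    current_time = time()
    loop.call_later(1, update_current_time, loop)
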
Exemplo n.º 32
0
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop', 'transport', 'connections', 'signal',
        # request params
        'parser', 'request', 'url', 'headers',
        # request config
        'request_handler', 'request_timeout', 'request_max_size',
        'request_class', 'is_request_stream', 'router',
        # enable or disable access log / error log purpose
        'has_log',
        # connection management
        '_total_request_size', '_timeout_handler', '_last_communication_time',
        '_is_stream_handler')

    def __init__(self, *, loop, request_handler, error_handler,
                 signal=Signal(), connections=set(), request_timeout=60,
                 request_max_size=None, request_class=None, has_log=True,
                 keep_alive=True, is_request_stream=False, router=None,
                 state=None, debug=False, **kwargs):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.router = router
        self.signal = signal
        self.has_log = has_log
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self.request_class = request_class or Request
        self.is_request_stream = is_request_stream
        self._is_stream_handler = False
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._keep_alive = keep_alive
        self._header_fragment = b''
        self.state = state if state else {}
        if 'requests_count' not in self.state:
            self.state['requests_count'] = 0
        self._debug = debug

    @property
    def keep_alive(self):
        return (
            self._keep_alive and
            not self.signal.stopped and
            self.parser.should_keep_alive())

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections.add(self)
        self._timeout_handler = self.loop.call_later(
            self.request_timeout, self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        self.connections.discard(self)
        self._timeout_handler.cancel()

    def connection_timeout(self):
        # Check if elapsed time since the request was initiated exceeds our
        # configured request timeout value
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = (
                self.loop.call_later(time_left, self.connection_timeout))
        else:
            if self._request_stream_task:
                self._request_stream_task.cancel()
            if self._request_handler_task:
                self._request_handler_task.cancel()
            try:
                raise RequestTimeout('Request Timeout')
            except RequestTimeout as exception:
                self.write_error(exception)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            exception = PayloadTooLarge('Payload Too Large')
            self.write_error(exception)

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # requests count
        self.state['requests_count'] = self.state['requests_count'] + 1

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError:
            message = 'Bad Request'
            if self._debug:
                message += '\n' + traceback.format_exc()
            exception = InvalidUsage(message)
            self.write_error(exception)

    def on_url(self, url):
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
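        # httptools may deliver a header name in fragments when it spans TCP
        # chunks; accumulate the fragments and only record the header once a
        # value arrives.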
        self._header_fragment += name

        if value is not None:
            if self._header_fragment == b'Content-Length' \
                    and int(value) > self.request_max_size:
                exception = PayloadTooLarge('Payload Too Large')
                self.write_error(exception)

            self.headers.append(
                    (self._header_fragment.decode().casefold(),
                     value.decode()))

            self._header_fragment = b''

    def on_headers_complete(self):
        self.request = self.request_class(
            url_bytes=self.url,
            headers=CIDict(self.headers),
            version=self.parser.get_http_version(),
            method=self.parser.get_method().decode(),
            transport=self.transport
        )
        if self.is_request_stream:
            self._is_stream_handler = self.router.is_stream_handler(
                self.request)
            if self._is_stream_handler:
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_body(self, body):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(body))
            return
        self.request.body.append(body)

    def on_message_complete(self):
        if self.is_request_stream and self._is_stream_handler:
            self._request_stream_task = self.loop.create_task(
                self.request.stream.put(None))
            return
        self.request.body = b''.join(self.request.body)
        self.execute_request_handler()

    def execute_request_handler(self):
        self._request_handler_task = self.loop.create_task(
            self.request_handler(
                self.request,
                self.write_response,
                self.stream_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #
    def write_response(self, response):
        """
        Writes response content synchronously to the transport.
        """
        try:
            keep_alive = self.keep_alive
            self.transport.write(
                response.output(
                    self.request.version, keep_alive,
                    self.request_timeout))
            if self.has_log:
                netlog.info('', extra={
                    'status': response.status,
                    'byte': len(response.body),
                    'host': '{0}:{1}'.format(self.request.ip[0],
                                             self.request.ip[1]),
                    'request': '{0} {1}'.format(self.request.method,
                                                self.request.url)
                })
        except AttributeError:
            log.error(
                ('Invalid response object for url {}, '
                 'Expected Type: HTTPResponse, Actual Type: {}').format(
                    self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error(
                'Connection lost before response written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    async def stream_response(self, response):
        """
        Streams a response to the client asynchronously. Attaches
        the transport to the response so the response consumer can
        write to the response as needed.
        """

        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream(
                self.request.version, keep_alive, self.request_timeout)
            if self.has_log:
                netlog.info('', extra={
                    'status': response.status,
                    'byte': -1,
                    'host': '{0}:{1}'.format(self.request.ip[0],
                                             self.request.ip[1]),
                    'request': '{0} {1}'.format(self.request.method,
                                                self.request.url)
                })
        except AttributeError:
            log.error(
                ('Invalid response object for url {}, '
                 'Expected Type: HTTPResponse, Actual Type: {}').format(
                    self.url, type(response)))
            self.write_error(ServerError('Invalid response type'))
        except RuntimeError:
            log.error(
                'Connection lost before response written @ {}'.format(
                    self.request.ip))
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(
                    repr(e)))
        finally:
            if not keep_alive:
                self.transport.close()
            else:
                self._last_request_time = current_time
                self.cleanup()

    def write_error(self, exception):
        response = None
        try:
            response = self.error_handler.response(self.request, exception)
            version = self.request.version if self.request else '1.1'
            self.transport.write(response.output(version))
        except RuntimeError:
            log.error(
                'Connection lost before error written @ {}'.format(
                    self.request.ip if self.request else 'Unknown'))
        except Exception as e:
            self.bail_out(
                "Writing error failed, connection closed {}".format(repr(e)),
                from_error=True)
        finally:
            if self.has_log:
                extra = dict()
                if isinstance(response, HTTPResponse):
                    extra['status'] = response.status
                    extra['byte'] = len(response.body)
                else:
                    extra['status'] = 0
                    extra['byte'] = -1
                if self.request:
                    extra['host'] = '%s:%d' % self.request.ip
                    extra['request'] = '%s %s' % (self.request.method,
                                                  self.url)
                else:
                    extra['host'] = 'UNKNOWN'
                    extra['request'] = 'nil'
                if self.parser and not (self.keep_alive
                                        and extra['status'] == 408):
                    netlog.info('', extra=extra)
            self.transport.close()

    def bail_out(self, message, from_error=False):
        if from_error or self.transport.is_closing():
            log.error(
                ("Transport closed @ {} and exception "
                 "experienced during error handling").format(
                    self.transport.get_extra_info('peername')))
            log.debug(
                'Exception:\n{}'.format(traceback.format_exc()))
        else:
            exception = ServerError(message)
            self.write_error(exception)
            log.error(message)

    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._request_stream_task = None
        self._total_request_size = 0
        self._is_stream_handler = False

    def close_if_idle(self):
        """Close the connection if a request is not being sent or received

        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False

    def close(self):
        """
        Force close the connection.
        """
        if self.transport is not None:
            self.transport.close()
            self.transport = None
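The protocol classes in these examples are only fragments. A minimal, hypothetical sketch of how such a class could be served with asyncio is shown here; the protocol_factory argument and its wiring are assumptions for illustration, not part of any example.

import asyncio

def serve(protocol_factory, host='127.0.0.1', port=8000):
    # protocol_factory is assumed to be a callable (e.g. a lambda) returning
    # a fully configured protocol instance for each new connection.
    loop = asyncio.new_event_loop()
    server = loop.run_until_complete(
        loop.create_server(protocol_factory, host, port))
    try:
        loop.run_forever()
    finally:
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.close()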
Exemplo n.º 33
0
class HttpProtocol(asyncio.Protocol):
    __slots__ = (
        # event loop, connection
        'loop',
        'transport',
        'connections',
        'signal',
        # request params
        'parser',
        'request',
        'url',
        'headers',
        # request config
        'request_handler',
        'request_timeout',
        'request_max_size',
        # connection management
        '_total_request_size',
        '_timeout_handler',
        '_last_communication_time')

    def __init__(self,
                 *,
                 loop,
                 request_handler,
                 error_handler,
                 signal=Signal(),
                 connections={},
                 request_timeout=60,
                 request_max_size=None):
        self.loop = loop
        self.transport = None
        self.request = None
        self.parser = None
        self.url = None
        self.headers = None
        self.signal = signal
        self.connections = connections
        self.request_handler = request_handler
        self.error_handler = error_handler
        self.request_timeout = request_timeout
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self._timeout_handler = None
        self._last_request_time = None
        self._request_handler_task = None

    # -------------------------------------------- #
    # Connection
    # -------------------------------------------- #

    def connection_made(self, transport):
        self.connections[self] = True
        self._timeout_handler = self.loop.call_later(self.request_timeout,
                                                     self.connection_timeout)
        self.transport = transport
        self._last_request_time = current_time

    def connection_lost(self, exc):
        del self.connections[self]
        self._timeout_handler.cancel()
        self.cleanup()

    def connection_timeout(self):
        # Check whether the elapsed time since the last request exceeds the
        # configured request timeout.
        time_elapsed = current_time - self._last_request_time
        if time_elapsed < self.request_timeout:
            time_left = self.request_timeout - time_elapsed
            self._timeout_handler = \
                self.loop.call_later(time_left, self.connection_timeout)
        else:
            if self._request_handler_task:
                self._request_handler_task.cancel()
            response = self.error_handler.response(
                self.request, RequestTimeout('Request Timeout'))
            self.write_response(response)

    # -------------------------------------------- #
    # Parsing
    # -------------------------------------------- #

    def data_received(self, data):
        # Check for the request itself getting too large and exceeding
        # memory limits
        self._total_request_size += len(data)
        if self._total_request_size > self.request_max_size:
            return self.bail_out(
                "Request too large ({}), connection closed".format(
                    self._total_request_size))

        # Create parser if this is the first time we're receiving data
        if self.parser is None:
            assert self.request is None
            self.headers = []
            self.parser = HttpRequestParser(self)

        # Parse request chunk or close connection
        try:
            self.parser.feed_data(data)
        except HttpParserError as e:
            self.bail_out(
                "Invalid request data, connection closed ({})".format(e))

    def on_url(self, url):
        self.url = url

    def on_header(self, name, value):
        if name == b'Content-Length' and int(value) > self.request_max_size:
            return self.bail_out(
                "Request body too large ({}), connection closed".format(value))

        self.headers.append((name.decode(), value.decode('utf-8')))

    def on_headers_complete(self):
        remote_addr = self.transport.get_extra_info('peername')
        if remote_addr:
            self.headers.append(('Remote-Addr', '%s:%s' % remote_addr))

        self.request = Request(url_bytes=self.url,
                               headers=CIMultiDict(self.headers),
                               version=self.parser.get_http_version(),
                               method=self.parser.get_method().decode())

    def on_body(self, body):
        if self.request.body:
            self.request.body += body
        else:
            self.request.body = body

    def on_message_complete(self):
        self._request_handler_task = self.loop.create_task(
            self.request_handler(self.request, self.write_response))

    # -------------------------------------------- #
    # Responding
    # -------------------------------------------- #

    def write_response(self, response):
        try:
            keep_alive = self.parser.should_keep_alive() \
                            and not self.signal.stopped
            self.transport.write(
                response.output(self.request.version, keep_alive,
                                self.request_timeout))
            if not keep_alive:
                self.transport.close()
            else:
                # Record that we received data
                self._last_request_time = current_time
                self.cleanup()
        except Exception as e:
            self.bail_out(
                "Writing response failed, connection closed {}".format(e))

    def bail_out(self, message):
        log.debug(message)
        self.transport.close()

    def cleanup(self):
        self.parser = None
        self.request = None
        self.url = None
        self.headers = None
        self._request_handler_task = None
        self._total_request_size = 0

    def close_if_idle(self):
        """
        Close the connection if a request is not being sent or received
        :return: boolean - True if closed, false if staying open
        """
        if not self.parser:
            self.transport.close()
            return True
        return False
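All of these protocols rely on the same httptools callback sequence. The following standalone sketch (not taken from any example above) traces that sequence for a small POST request.

from httptools import HttpRequestParser

class CallbackTrace:
    '''Records the order in which httptools invokes the parser callbacks.'''

    def __init__(self):
        self.events = []

    def on_message_begin(self):
        self.events.append('message_begin')

    def on_url(self, url):
        self.events.append(('url', url))

    def on_header(self, name, value):
        self.events.append(('header', name, value))

    def on_headers_complete(self):
        self.events.append('headers_complete')

    def on_body(self, body):
        self.events.append(('body', body))

    def on_message_complete(self):
        self.events.append('message_complete')

trace = CallbackTrace()
parser = HttpRequestParser(trace)
parser.feed_data(
    b'POST /upload HTTP/1.1\r\n'
    b'Host: example.com\r\n'
    b'Content-Length: 5\r\n'
    b'\r\n'
    b'hello'
)
print(parser.get_method(), parser.get_http_version(), parser.should_keep_alive())
print(trace.events)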
Exemplo n.º 34
0
class LuyProtocol(asyncio.Protocol):
    def __init__(self,
                 app,
                 loop=None,
                 keep_alive=True,
                 request_max_size=None,
                 has_stream=False,
                 debug=True):
        self.parser = None
        self.url = None
        self._request_handler_task = None
        self._request_stream_task = None
        self.request_max_size = request_max_size
        self._total_request_size = 0
        self.loop = loop
        self.header = {}
        self.app = app
        self.keep_alive = keep_alive
        self.has_stream = has_stream
        self.stream_handler = None

    def connection_made(self, transport):
        self.transport = transport
        if self.parser is None:
            self.parser = HttpRequestParser(self)

    def connection_lost(self, exc):
        self.transport.close()
        self.refresh()
        self.transport = None

    #-------------------------------------
    #               parsing
    #-------------------------------------
    def data_received(self, data):
        '''
        Receive data from the network. Data arrives as a stream, so the
        accumulated size must be checked to protect against memory limits.
        '''
        if self.request_max_size:
            self._total_request_size += len(data)
            if self._total_request_size > self.request_max_size:
                # TODO: implement a proper 413 Payload Too Large response
                self.write_error()

        try:
            self.parser.feed_data(data)
        except HttpParserError as e:
            print('error while parsing request:', e)

    def on_message_begin(self):
        # print('on_message_begin')
        pass

    def on_url(self, url):
        # store the request URL; on_url may be called more than once, so append
        if not self.url:
            self.url = url
        else:
            self.url += url

    def on_header(self, name, value):
        '''
        Record a header; reject requests whose Content-Length is too large,
        to protect the server.
        '''
        if value is not None:
            if name == b'Content-Length' and int(value) > 1500:
                self.write_error()
            self.header[name.decode().casefold()] = value.decode()

    def on_headers_complete(self):
        self.request = request_class(url_bytes=self.url,
                                     header=self.header,
                                     version=self.parser.get_http_version(),
                                     method=self.parser.get_method().decode())

        # here is where we deal with "Expect: 100-Continue"
        # when user upload some big file or big things
        # their client would send a header with 'Expect: 100-Continue'
        # to if check the server can be accepted.
        if self.has_stream:
            self.stream_handler, kw = self.app.router.get_mapped_handle(
                self.request)
            if self.stream_handler and kw['stream']:
                # use an asyncio.Queue so the handler can await body chunks
                # as they arrive
                self.request.stream = asyncio.Queue()
                self.execute_request_handler()

    def on_body(self, body):
        if self.has_stream and self.stream_handler:
            self.loop.create_task(self.request.stream.put(body))
            return

        self.request.body.append(body)

    def on_message_complete(self):

        # None signals the streaming handler that the request body is complete
        if self.has_stream and self.stream_handler:
            self.loop.create_task(self.request.stream.put(None))
            return

        self.execute_request_handler()

    def execute_request_handler(self):
        self._request_handler_task = self.loop.create_task(
            self.app.request_handler(self.request, self.write_response,
                                     self.stream_callback))

    #---------------------------
    #      error handling
    #---------------------------
    def write_error(self):
        response = html('bad connection', status=400)
        self.write_response(response)
        self.transport.close()

    #-------------------------------------
    #            write response
    #-------------------------------------

    def write_response(self, response):
        '''
        Write the response to the transport. Writing is fast enough that it
        does not need to be a coroutine.
        '''
        try:
            keep_alive = self.keep_alive
            self.transport.write(response.drain(keep_alive=keep_alive))
            if keep_alive:
                self.refresh()
            else:
                self.transport.close()
        except AttributeError as e:
            print('AttributeError while writing response:', e)
            self.transport.close()
        except RuntimeError as e:
            print('RuntimeError while writing response:', e)
            self.transport.close()
        except Exception as e:
            print('Exception while writing response:', e)
            self.transport.close()

    async def stream_callback(self, response):
        '''
        Stream a response to the client asynchronously. The transport is
        attached to the response so the consumer can write to it directly.
        '''
        try:
            keep_alive = self.keep_alive
            response.transport = self.transport
            await response.stream_output(self.request.version, keep_alive)
            if keep_alive:
                self.refresh()
            else:
                self.transport.close()

        except AttributeError as e:
            print('AttributeError while streaming response:', e)
            self.transport.close()
        except RuntimeError as e:
            print('RuntimeError while streaming response:', e)
            self.transport.close()
        except Exception as e:
            print('Exception while streaming response:', e)
            self.transport.close()

    def refresh(self):
        '''
        Reset per-request state in preparation for the next incoming request.
        '''
        self.url = None
        self.header = {}
        self._request_handler_task = None
        self.stream_handler = None
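The comment in on_headers_complete above mentions "Expect: 100-Continue", but LuyProtocol never acts on it. A hedged sketch of one way it could respond is shown below; the helper name is hypothetical, it reuses the same self.header and self.transport attributes, and always accepting the upload is an assumption for illustration.

    def _maybe_send_continue(self):
        # Hypothetical helper, not part of the original LuyProtocol: if the
        # client asked for permission before sending a large body, tell it
        # to proceed.
        if self.header.get('expect', '').lower() == '100-continue':
            self.transport.write(b'HTTP/1.1 100 Continue\r\n\r\n')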