def fetch(self, request, raise_error=True, **kwargs):
    """Executes a request, asynchronously returning an `HTTPResponse`.

    The request may be either a string URL or an `HTTPRequest` object.
    If it is a string, we construct an `HTTPRequest` using any additional
    kwargs: ``HTTPRequest(request, **kwargs)``

    This method returns a `.Future` whose result is an
    `HTTPResponse`. By default, the ``Future`` will raise an
    `HTTPError` if the request returned a non-200 response code
    (other errors may also be raised if the server could not be
    contacted). Instead, if ``raise_error`` is set to False, the
    response will always be returned regardless of the response code.

    If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
    In the callback interface, `HTTPError` is not automatically raised.
    Instead, you must check the response's ``error`` attribute or
    call its `~HTTPResponse.rethrow` method.

    .. versionchanged:: 6.0

       The ``callback`` argument was removed. Use the returned
       `.Future` instead.

       The ``raise_error=False`` argument only affects the
       `HTTPError` raised when a non-200 response code is used,
       instead of suppressing all errors.
    """
    if self._closed:
        raise RuntimeError("fetch() called on closed AsyncHTTPClient")
    if not isinstance(request, HTTPRequest):
        request = HTTPRequest(url=request, **kwargs)
    else:
        # kwargs are only meaningful when we build the HTTPRequest
        # ourselves; silently ignoring them would hide caller bugs.
        if kwargs:
            raise ValueError(
                "kwargs can't be used if request is an HTTPRequest object")
    # We may modify this (to add Host, Accept-Encoding, etc),
    # so make sure we don't modify the caller's object.  This is also
    # where normal dicts get converted to HTTPHeaders objects.
    request.headers = httputil.HTTPHeaders(request.headers)
    request = _RequestProxy(request, self.defaults)
    future = Future()

    def handle_response(response):
        # Propagate errors unless raise_error=False and the error is a
        # plain non-2xx response code (connection failures always raise).
        if response.error:
            if raise_error or not response._error_is_response_code:
                future.set_exception(response.error)
                return
        future_set_result_unless_cancelled(future, response)

    self.fetch_impl(request, handle_response)
    return future
async def proxy(self, host, port, proxied_path):
    '''
    This serverextension handles:
        {base_url}/proxy/{port([0-9]+)}/{proxied_path}
        {base_url}/proxy/absolute/{port([0-9]+)}/{proxied_path}
        {base_url}/{proxy_base}/{proxied_path}
    '''
    if 'Proxy-Connection' in self.request.headers:
        del self.request.headers['Proxy-Connection']

    self._record_activity()

    if self.request.headers.get("Upgrade", "").lower() == 'websocket':
        # We wanna websocket!
        # jupyterhub/jupyter-server-proxy@36b3214
        self.log.info(
            "we wanna websocket, but we don't define WebSocketProxyHandler"
        )
        self.set_status(500)
        # Bug fix: without this return, the handler fell through and
        # proxied the request anyway after already reporting a 500.
        return

    # Tornado rejects a body on bodyless methods, but an upstream POST
    # with an empty body must still send one; distinguish b'' from None.
    body = self.request.body
    if not body:
        if self.request.method == 'POST':
            body = b''
        else:
            body = None

    client = httpclient.AsyncHTTPClient()
    req = self._build_proxy_request(host, port, proxied_path, body)
    response = await client.fetch(req, raise_error=False)
    # record activity at start and end of requests
    self._record_activity()

    # For all non http errors...
    if response.error and type(response.error) is not httpclient.HTTPError:
        self.set_status(500)
        self.write(str(response.error))
    else:
        self.set_status(response.code, response.reason)

        # clear tornado default header
        self._headers = httputil.HTTPHeaders()

        for header, v in response.headers.get_all():
            if header not in ('Content-Length', 'Transfer-Encoding',
                              'Content-Encoding', 'Connection'):
                # some header appear multiple times, eg 'Set-Cookie'
                self.add_header(header, v)

        if response.body:
            self.write(response.body)
def fetch(self, request, callback=None, **kwargs):
    """Executes a request, asynchronously returning an `HTTPResponse`.

    The request may be either a string URL or an `HTTPRequest` object.
    If it is a string, we construct an `HTTPRequest` using any additional
    kwargs: ``HTTPRequest(request, **kwargs)``

    This method returns a `.Future` whose result is an
    `HTTPResponse`.  The ``Future`` will raise an `HTTPError` if
    the request returned a non-200 response code.

    If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
    In the callback interface, `HTTPError` is not automatically raised.
    Instead, you must check the response's ``error`` attribute or
    call its `~HTTPResponse.rethrow` method.
    """
    if self._closed:
        raise RuntimeError("fetch() called on closed AsyncHTTPClient")
    if not isinstance(request, HTTPRequest):
        request = HTTPRequest(url=request, **kwargs)
    # We may modify this (to add Host, Accept-Encoding, etc),
    # so make sure we don't modify the caller's object.  This is also
    # where normal dicts get converted to HTTPHeaders objects.
    request.headers = httputil.HTTPHeaders(request.headers)
    request = _RequestProxy(request, self.defaults)
    future = TracebackFuture()
    if callback is not None:
        callback = stack_context.wrap(callback)

        def handle_future(future):
            # Bridge the Future result back to the legacy callback
            # interface: errors are converted into a response object
            # (599 for non-HTTP failures) rather than raised.
            exc = future.exception()
            if isinstance(exc, HTTPError) and exc.response is not None:
                response = exc.response
            elif exc is not None:
                response = HTTPResponse(
                    request, 599, error=exc,
                    request_time=time.time() - request.start_time)
            else:
                response = future.result()
            self.io_loop.add_callback(callback, response)
        future.add_done_callback(handle_future)

    def handle_response(response):
        # The Future interface always raises on error (no raise_error
        # flag in this version).
        if response.error:
            future.set_exception(response.error)
        else:
            future.set_result(response)
    self.fetch_impl(request, handle_response)
    return future
def __init__(self, environ):
    """Parses the given WSGI environ to construct the request.

    NOTE(review): this uses Python-2-only APIs (``urllib.quote``,
    ``dict.iteritems``) — it will not run under Python 3 as written.
    """
    self.method = environ["REQUEST_METHOD"]
    # SCRIPT_NAME + PATH_INFO together form the full request path.
    self.path = urllib.quote(environ.get("SCRIPT_NAME", ""))
    self.path += urllib.quote(environ.get("PATH_INFO", ""))
    self.uri = self.path
    self.arguments = {}
    self.query = environ.get("QUERY_STRING", "")
    if self.query:
        self.uri += "?" + self.query
        arguments = parse_qs_bytes(native_str(self.query))
        for name, values in arguments.iteritems():
            # Drop empty values (e.g. "?a=&b=1" keeps only b).
            values = [v for v in values if v]
            if values:
                self.arguments[name] = values
    self.version = "HTTP/1.1"
    self.headers = httputil.HTTPHeaders()
    # CONTENT_TYPE / CONTENT_LENGTH are special-cased in the WSGI spec
    # (they do not carry the HTTP_ prefix).
    if environ.get("CONTENT_TYPE"):
        self.headers["Content-Type"] = environ["CONTENT_TYPE"]
    if environ.get("CONTENT_LENGTH"):
        self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
    for key in environ:
        if key.startswith("HTTP_"):
            self.headers[key[5:].replace("_", "-")] = environ[key]
    if self.headers.get("Content-Length"):
        self.body = environ["wsgi.input"].read(
            int(self.headers["Content-Length"]))
    else:
        self.body = ""
    self.protocol = environ["wsgi.url_scheme"]
    self.remote_ip = environ.get("REMOTE_ADDR", "")
    if environ.get("HTTP_HOST"):
        self.host = environ["HTTP_HOST"]
    else:
        self.host = environ["SERVER_NAME"]

    # Parse request body
    self.files = {}
    content_type = self.headers.get("Content-Type", "")
    if content_type.startswith("application/x-www-form-urlencoded"):
        # Form fields are merged into the query-string arguments.
        for name, values in parse_qs_bytes(native_str(self.body)).iteritems():
            self.arguments.setdefault(name, []).extend(values)
    elif content_type.startswith("multipart/form-data"):
        if 'boundary=' in content_type:
            boundary = content_type.split('boundary=', 1)[1]
            if boundary:
                httputil.parse_multipart_form_data(
                    utf8(boundary), self.body, self.arguments, self.files)
        else:
            logging.warning("Invalid multipart/form-data")

    # monotime() presumably returns a monotonic clock value — TODO
    # confirm against its definition elsewhere in this project.
    self._start_time = monotime()
    self._finish_time = None
def connect(self, url):
    """Open a websocket connection to *url* and register the connect
    callback on the resulting connection's future."""
    content_type = self.config['WEBSOCKET_CLIENT']['APPLICATION_JSON']
    request = httpclient.HTTPRequest(
        url=url,
        headers=httputil.HTTPHeaders({'Content-Type': content_type}),
        connect_timeout=self.connect_timeout,
        request_timeout=self.request_timeout)
    connection = websocket.WebSocketClientConnection(
        ioloop.IOLoop.current(), request)
    connection.connect_future.add_done_callback(self._connect_callback)
def connect(self, url):
    """Connect to the server.

    :param str url: server URL.
    """
    headers = httputil.HTTPHeaders({'Content-Type': APPLICATION_JSON})
    request = httpclient.HTTPRequest(url=url,
                                     connect_timeout=self.connect_timeout,
                                     request_timeout=self.request_timeout,
                                     headers=headers)
    # Bug fix: the configured HTTPRequest was built and then discarded —
    # websocket_connect(url) ignored the Content-Type header and both
    # timeouts. Pass the request object so they are actually applied.
    ws = websocket_connect(request)
    ws.add_done_callback(self._connect_callback)
def test_process_bogus_response(self):
    """_process_error returns (False, None) for a 503 whose body is not
    a structured error payload."""
    buffer = io.BytesIO(b'Slow Down')
    req = httpclient.HTTPRequest('/')
    hdrs = httputil.HTTPHeaders({
        'x-amzn-RequestId': '3840c615-0503-4a53-a2f6-07afa795a5d6',
        'Date': 'Tue, 06 Jun 2017 18:31:47 GMT',
    })
    resp = httpclient.HTTPResponse(req, 503, hdrs, buffer)
    err = httpclient.HTTPError(503, 'Bad Request', resp)
    with self.client_with_default_creds('s3') as client:
        self.assertEqual(client._process_error(err), (False, None))
def connect(self, url):
    """Connect to the server.

    :param str url: server URL.
    """
    self._connect_status = self.CONNECTING
    json_headers = httputil.HTTPHeaders({'Content-Type': APPLICATION_JSON})
    request = httpclient.HTTPRequest(
        url=url,
        headers=json_headers,
        connect_timeout=self.connect_timeout,
        request_timeout=self.request_timeout)
    conn = websocket.WebSocketClientConnection(self._io_loop, request)
    conn.connect_future.add_done_callback(self._connect_callback)
def fetch(self, request, callback=None, raise_error=True, **kwargs):
    """Executes a request, asynchronously returning an `HTTPResponse`.

    The request may be either a string URL or an `HTTPRequest` object.
    If it is a string, we construct an `HTTPRequest` using any
    additional kwargs: ``HTTPRequest(request, **kwargs)``

    This method returns a `.Future` whose result is an `HTTPResponse`.
    By default, the ``Future`` will raise an `HTTPError` if the request
    returned a non-200 response code.  Instead, if ``raise_error`` is
    set to False, the response will always be returned regardless of
    the response code.

    If a ``callback`` is given, it will be invoked with the
    `HTTPResponse`.  In the callback interface, `HTTPError` is not
    automatically raised.  Instead, you must check the response's
    ``error`` attribute or call its `~HTTPResponse.rethrow` method.
    """
    if self._closed:
        raise RuntimeError("fetch() called on closed AsyncHTTPClient")
    if not isinstance(request, HTTPRequest):
        request = HTTPRequest(url=request, **kwargs)
    # We may modify this (to add Host, Accept-Encoding, etc),
    # so make sure we don't modify the caller's object.  This is also
    # where normal dicts get converted to HTTPHeaders objects.
    request.headers = httputil.HTTPHeaders(request.headers)
    request = _RequestProxy(request, self.defaults)
    future = TracebackFuture()
    if callback is not None:
        callback = stack_context.wrap(callback)

        def handle_future(future):
            # Legacy callback bridge: exceptions become a synthetic
            # 599 response rather than being raised at the callback.
            exc = future.exception()
            if isinstance(exc, HTTPError) and exc.response is not None:
                response = exc.response
            elif exc is not None:
                response = HTTPResponse(
                    request, 599, error=exc,
                    request_time=time.time() - request.start_time)
            else:
                response = future.result()
            self.io_loop.add_callback(callback, response)
        future.add_done_callback(handle_future)

    def handle_response(response):
        # raise_error=False suppresses the exception and resolves the
        # Future with the response object instead.
        if raise_error and response.error:
            future.set_exception(response.error)
        else:
            future.set_result(response)
    self.fetch_impl(request, handle_response)
    return future
def _handle_request(self, request):
    """
    Callback method called directly by the HTTP server.

    This method decodes received HTTP request and calls provided upper
    layer receive_cb() method which process decoded primitive and
    returns another primitive object as result. The resulting primitive
    object is encoded to HTTP response message and sent back to client.
    """
    primitive = self.decoder.decode(request)
    rsp_primitive = self.receive_cb(primitive)
    if not rsp_primitive:
        # Upper layer produced nothing: answer 500 with empty headers.
        code = httplib.INTERNAL_SERVER_ERROR
        reason = status_codes._codes[code]
        start_line = httputil.ResponseStartLine(version='HTTP/1.1',
                                                code=code, reason=reason)
        request.connection.write_headers(start_line, httputil.HTTPHeaders())
        request.finish()
        return
    encoded = self.encoder.encode(rsp_primitive)
    headers = httputil.HTTPHeaders()
    headers.update(encoded.headers)
    code = encoded.status_code
    reason = encoded.reason
    start_line = httputil.ResponseStartLine(version='HTTP/1.1',
                                            code=code, reason=reason)
    request.connection.write_headers(start_line, headers)
    # set content
    if encoded.content:
        # NOTE(review): connection.write generally expects bytes;
        # json.dumps returns str — confirm the connection layer encodes.
        request.connection.write(json.dumps(encoded.content))
    request.finish()
def __init__(self, method, uri, version="HTTP/1.0", headers=None,
             body=None, remote_ip=None, protocol=None, host=None,
             files=None, connection=None):
    """Construct an HTTP server request.

    When the connection has ``xheaders`` enabled, trusted proxy
    headers (X-Real-Ip / X-Forwarded-For, X-Scheme /
    X-Forwarded-Proto) override the socket-level remote IP and scheme.
    """
    self.method = method
    self.uri = uri
    self.version = version
    self.headers = headers or httputil.HTTPHeaders()
    self.body = body or ""
    if connection and connection.xheaders:
        # Squid uses X-Forwarded-For, others use X-Real-Ip
        self.remote_ip = self.headers.get(
            "X-Real-Ip", self.headers.get("X-Forwarded-For", remote_ip))
        # Fall back to the socket address if the header value is bogus.
        if not self._valid_ip(self.remote_ip):
            self.remote_ip = remote_ip
        # AWS uses X-Forwarded-Proto
        self.protocol = self.headers.get(
            "X-Scheme", self.headers.get("X-Forwarded-Proto", protocol))
        if self.protocol not in ("http", "https"):
            self.protocol = "http"
    else:
        self.remote_ip = remote_ip
        if protocol:
            self.protocol = protocol
        elif connection and isinstance(connection.stream,
                                       iostream.SSLIOStream):
            # No explicit protocol: infer https from the TLS stream.
            self.protocol = "https"
        else:
            self.protocol = "http"
    self.host = host or self.headers.get("Host") or "127.0.0.1"
    self.files = files or {}
    self.connection = connection
    self._start_time = time.time()
    self._finish_time = None

    scheme, netloc, path, query, fragment = urlparse.urlsplit(
        native_str(uri))
    self.path = path
    self.query = query
    arguments = parse_qs_bytes(query)
    self.arguments = {}
    for name, values in arguments.iteritems():
        # Discard empty values from the query string.
        values = [v for v in values if v]
        if values:
            self.arguments[name] = values
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
                      on_message_callback=None, compression_options=None,
                      ping_interval=None, ping_timeout=None):
    """Client-side websocket support.

    Takes a url and returns a Future whose result is a
    `WebSocketClientConnection`.

    ``compression_options`` is interpreted in the same way as the
    return value of `.WebSocketHandler.get_compression_options`.

    The connection supports two styles of operation. In the coroutine
    style, the application typically calls
    `~.WebSocketClientConnection.read_message` in a loop::

        conn = yield websocket_connect(url)
        while True:
            msg = yield conn.read_message()
            if msg is None: break
            # Do something with msg

    In the callback style, pass an ``on_message_callback`` to
    ``websocket_connect``. In both styles, a message of ``None``
    indicates that the connection has been closed.

    .. versionchanged:: 3.2
       Also accepts ``HTTPRequest`` objects in place of urls.

    .. versionchanged:: 4.1
       Added ``compression_options`` and ``on_message_callback``.
       The ``io_loop`` argument is deprecated.
    """
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        # connect_timeout belongs to the caller-built request in this case.
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    request = httpclient._RequestProxy(
        request, httpclient.HTTPRequest._DEFAULTS)
    conn = WebSocketClientConnection(io_loop, request,
                                     on_message_callback=on_message_callback,
                                     compression_options=compression_options,
                                     ping_interval=ping_interval,
                                     ping_timeout=ping_timeout)
    if callback is not None:
        io_loop.add_future(conn.connect_future, callback)
    return conn.connect_future
def fetch(self, request):
    """
    Asynchronously fetches a request.

    Returns a Future that will resolve when the request finishes or
    when an error occurs.

    :param request: client request
    :type request: tornado.httpclient.HTTPRequest
    :return: Future
    """
    if self.no_more_stream_ids:
        raise h2.exceptions.NoAvailableStreamId()

    # prepare the request object (copy headers so the caller's object
    # is never mutated; apply the default request options)
    request.headers = httputil.HTTPHeaders(request.headers)
    request = _RequestProxy(request, dict(HTTPRequest._DEFAULTS))

    # wrap everything in a Future
    future = Future()

    def handle_response(response):
        """Will be called by HTTP2Stream on request finished"""
        # The stream may deliver either a full HTTPResponse or a bare
        # exception object; handle both shapes.
        if isinstance(response, HTTPResponse):
            if response.error:
                future.set_exception(response.error)
            else:
                future.set_result(response)
        else:
            future.set_exception(response)

    # unique request key
    key = object()

    # put the request in the pending queue
    self.pending_requests.append((key, request, handle_response))

    # if we are already processing maximum concurrent requests,
    # set a timeout for the time spent in queue
    if (not self.connection
            or not self.connection.is_ready
            or len(self.active_requests) >= self.max_active_requests):
        timeout_handle = IOLoop.current().add_timeout(
            IOLoop.current().time() + request.request_timeout,
            functools.partial(self.on_queue_timeout, key))
    else:
        timeout_handle = None

    # add the timeout for the queue
    self.queue_timeouts[key] = (request, handle_response, timeout_handle)
    self.process_pending_requests()
    if self.pending_requests:
        log.debug('Queued request, {} active, {} in queue.'.format(
            len(self.active_requests), len(self.pending_requests)))
    return future
def __init__(self, method, uri, version="HTTP/1.0", headers=None,
             body=None, remote_ip=None, protocol=None, host=None,
             files=None, connection=None):
    """Construct an HTTP server request.

    Socket-level remote IP/protocol are set first and then optionally
    overridden by proxy-supplied X-headers when the connection has
    ``xheaders`` enabled.
    """
    self.method = method
    self.uri = uri
    self.version = version
    self.headers = headers or httputil.HTTPHeaders()
    self.body = body or ""

    # set remote IP and protocol
    self.remote_ip = remote_ip
    if protocol:
        self.protocol = protocol
    elif connection and isinstance(connection.stream,
                                   iostream.SSLIOStream):
        self.protocol = "https"
    else:
        self.protocol = "http"

    # xheaders can override the defaults
    if connection and connection.xheaders:
        # Squid uses X-Forwarded-For, others use X-Real-Ip
        ip = self.headers.get("X-Forwarded-For", self.remote_ip)
        # X-Forwarded-For may be a comma-separated chain; the last
        # entry is the one added by the proxy closest to us.
        ip = ip.split(',')[-1].strip()
        ip = self.headers.get("X-Real-Ip", ip)
        if netutil.is_valid_ip(ip):
            self.remote_ip = ip
        # AWS uses X-Forwarded-Proto
        proto = self.headers.get(
            "X-Scheme", self.headers.get("X-Forwarded-Proto",
                                         self.protocol))
        if proto in ("http", "https"):
            self.protocol = proto

    self.host = host or self.headers.get("Host") or "127.0.0.1"
    self.files = files or {}
    self.connection = connection
    self._start_time = time.time()
    self._finish_time = None

    self.path, sep, self.query = uri.partition('?')
    self.arguments = parse_qs_bytes(self.query, keep_blank_values=True)
    self.query_arguments = copy.deepcopy(self.arguments)
    self.body_arguments = {}
def __call__(self, request):
    """Run the wrapped WSGI app for *request*, inject the live-reload
    script before </head>, and write the result to the connection."""
    data = {}
    response = []

    def start_response(status, response_headers, exc_info=None):
        # WSGI callable: capture status/headers; returning the append
        # method lets legacy apps use the write() callable.
        data["status"] = status
        data["headers"] = response_headers
        return response.append

    app_response = self.wsgi_app(
        WSGIContainer.environ(request), start_response)
    try:
        response.extend(app_response)
        body = b"".join(response)
    finally:
        # Per the WSGI spec, close() must be called if present.
        if hasattr(app_response, "close"):
            app_response.close()
    if not data:
        raise Exception("WSGI app did not call start_response")

    status_code, reason = data["status"].split(' ', 1)
    status_code = int(status_code)
    headers = data["headers"]
    header_set = set(k.lower() for (k, v) in headers)
    body = escape.utf8(body)
    # Inject the live-reload script just before </head>, if present.
    if HEAD_END in body:
        body = body.replace(HEAD_END, self.script + HEAD_END)
    if status_code != 304:
        if "content-type" not in header_set:
            headers.append((
                "Content-Type",
                "application/octet-stream; charset=UTF-8"
            ))
        if "content-length" not in header_set:
            headers.append(("Content-Length", str(len(body))))
        if "server" not in header_set:
            headers.append(("Server", "LiveServer"))

    start_line = httputil.ResponseStartLine(
        "HTTP/1.1", status_code, reason
    )
    header_obj = httputil.HTTPHeaders()
    for key, value in headers:
        if key.lower() == 'content-length':
            # Recompute: the script injection above may have changed
            # the body length after the app set its own Content-Length.
            value = str(len(body))
        header_obj.add(key, value)
    request.connection.write_headers(start_line, header_obj, chunk=body)
    request.connection.finish()
    self._log(status_code, request)
def __call__(self, request: httputil.HTTPServerRequest) -> None: data = {} # type: Dict[str, Any] response = [] # type: List[bytes] def start_response( status: str, headers: List[Tuple[str, str]], exc_info: Optional[ Tuple[ "Optional[Type[BaseException]]", Optional[BaseException], Optional[TracebackType], ] ] = None, ) -> Callable[[bytes], Any]: data["status"] = status data["headers"] = headers return response.append app_response = self.wsgi_application( WSGIContainer.environ(request), start_response ) try: response.extend(app_response) body = b"".join(response) finally: if hasattr(app_response, "close"): app_response.close() # type: ignore if not data: raise Exception("WSGI app did not call start_response") status_code_str, reason = data["status"].split(" ", 1) status_code = int(status_code_str) headers = data["headers"] # type: List[Tuple[str, str]] header_set = set(k.lower() for (k, v) in headers) body = escape.utf8(body) if status_code != 304: if "content-length" not in header_set: headers.append(("Content-Length", str(len(body)))) if "content-type" not in header_set: headers.append(("Content-Type", "text/html; charset=UTF-8")) if "server" not in header_set: headers.append(("Server", "TornadoServer/%s" % tornado.version)) start_line = httputil.ResponseStartLine("HTTP/1.1", status_code, reason) header_obj = httputil.HTTPHeaders() for key, value in headers: header_obj.add(key, value) assert request.connection is not None request.connection.write_headers(start_line, header_obj, chunk=body) request.connection.finish() self._log(status_code, request)
def connect(self):
    """Start an asynchronous websocket connection to the exchange API
    and register the open callback."""
    logging.info('[connect] starting...')
    self._connect_status = DataConnection.CONNECTING
    request = httpclient.HTTPRequest(
        url="wss://api.huobi.br.com/ws",
        headers=httputil.HTTPHeaders({'Content-Type': 'application/json'}),
        connect_timeout=self.connect_timeout,
        request_timeout=self.request_timeout)
    self._ws_connection = tornado.websocket.WebSocketClientConnection(
        request)
    self._ws_connection.connect_future.add_done_callback(self._on_open)
def connect(self, url):
    """Connect to the server.

    :param str url: server URL.
    """
    req = httpclient.HTTPRequest(
        url=url,
        headers=httputil.HTTPHeaders({'Content-Type': APPLICATION_JSON}),
        connect_timeout=self.connect_timeout,
        request_timeout=self.request_timeout)
    self.ws_conn = websocket.WebSocketClientConnection(
        ioloop.IOLoop.current(), req)
    self.ws_conn.connect_future.add_done_callback(self._connect_callback)
def clear(self):
    """Reset headers, write buffer, status code and reason to their
    defaults for a fresh JSON response."""
    self._headers = httputil.HTTPHeaders({
        'Server': self.config.web.get('name'),
        'Content-Type': 'application/json; charset=UTF-8',
        'Date': httputil.format_timestamp(time.time()),
    })
    self.set_default_headers()
    self._write_buffer = []
    self._status_code = HTTPStatus.OK.value
    # Bug fix: the reason must be the textual phrase ("OK"), not the
    # numeric status (200) that HTTPStatus.OK.value yields.
    self._reason = HTTPStatus.OK.phrase
def _connect(self):
    """Connect to the server using tornado's httpclient.

    :param url: server URL.
    :type url: str.
    """
    headers = httputil.HTTPHeaders({'Content-Type': APPLICATION_JSON})
    request = httpclient.HTTPRequest(url=self.url,
                                     connect_timeout=self.connect_timeout,
                                     request_timeout=self.request_timeout,
                                     headers=headers)
    # Bug fix: passing request.url discarded the Content-Type header
    # and both timeouts configured above; pass the HTTPRequest itself,
    # which websocket_connect accepts directly.
    ws_conn = websocket.websocket_connect(request)
    ws_conn.add_done_callback(self._connect_callback)
def clear(self):
    """Resets all headers and content for this response."""
    self._headers = httputil.HTTPHeaders({"Server": SERVER_NAME})
    self.set_default_headers()
    # HTTP/1.0 clients that asked for keep-alive must be answered with
    # an explicit Connection: Keep-Alive header (HTTP/1.1 defaults to
    # persistent connections, so no header is needed there).
    if (not self.request.supports_http_1_1() and
            getattr(self.request, 'connection', None) and
            not self.request.connection.no_keep_alive):
        conn_header = self.request.headers.get("Connection")
        if conn_header and (conn_header.lower() == "keep-alive"):
            self._headers["Connection"] = "Keep-Alive"
    self._write_buffer = []
    self._status_code = 200
    self._reason = httputil.responses[200]
def __init__(self, url, method="GET", headers=None, body=None,
             auth_username=None, auth_password=None,
             connect_timeout=20.0, request_timeout=20.0,
             if_modified_since=None, follow_redirects=True,
             max_redirects=5, user_agent=None, use_gzip=True,
             network_interface=None, streaming_callback=None,
             header_callback=None, prepare_curl_callback=None,
             proxy_host=None, proxy_port=None, proxy_username=None,
             proxy_password='', allow_nonstandard_methods=False):
    """Build an HTTP client request, normalizing headers and applying
    libcurl-specific workarounds (Pragma/Expect suppression)."""
    if headers is None:
        headers = httputil.HTTPHeaders()
    if if_modified_since:
        timestamp = calendar.timegm(if_modified_since.utctimetuple())
        headers["If-Modified-Since"] = email.utils.formatdate(
            timestamp, localtime=False, usegmt=True)
    # Suppress curl's default "Pragma: no-cache" unless caller set one.
    if "Pragma" not in headers:
        headers["Pragma"] = ""
    # Proxy support: proxy_host and proxy_port must be set to connect via
    # proxy.  The username and password credentials are optional.
    self.proxy_host = proxy_host
    self.proxy_port = proxy_port
    self.proxy_username = proxy_username
    self.proxy_password = proxy_password
    # libcurl's magic "Expect: 100-continue" behavior causes delays
    # with servers that don't support it (which include, among others,
    # Google's OpenID endpoint).  Additionally, this behavior has
    # a bug in conjunction with the curl_multi_socket_action API
    # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
    # which increases the delays.  It's more trouble than it's worth,
    # so just turn off the feature (yes, setting Expect: to an empty
    # value is the official way to disable this)
    if "Expect" not in headers:
        headers["Expect"] = ""
    self.url = _utf8(url)
    self.method = method
    self.headers = headers
    self.body = body
    self.auth_username = _utf8(auth_username)
    self.auth_password = _utf8(auth_password)
    self.connect_timeout = connect_timeout
    self.request_timeout = request_timeout
    self.follow_redirects = follow_redirects
    self.max_redirects = max_redirects
    self.user_agent = user_agent
    self.use_gzip = use_gzip
    self.network_interface = network_interface
    self.streaming_callback = streaming_callback
    self.header_callback = header_callback
    self.prepare_curl_callback = prepare_curl_callback
    self.allow_nonstandard_methods = allow_nonstandard_methods
    self.start_time = time.time()
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
                      on_message_callback=None, compression_options=None):
    """Client-side websocket support.

    Takes a url and returns a Future whose result is a
    `WebSocketClientConnection`.

    ``compression_options`` is interpreted in the same way as the
    return value of `.WebSocketHandler.get_compression_options`.

    The connection supports two styles of operation. In the coroutine
    style, the application typically calls
    `~.WebSocketClientConnection.read_message` in a loop::

        conn = yield websocket_connect(url)
        while True:
            msg = yield conn.read_message()
            if msg is None: break
            # Do something with msg

    In the callback style, pass an ``on_message_callback`` to
    ``websocket_connect``. In both styles, a message of ``None``
    indicates that the websocket connection has been closed.

    .. versionchanged:: 3.2
       Also accepts ``HTTPRequest`` objects in place of urls.

    .. versionchanged:: 4.1
       Added ``compression_options`` and ``on_message_callback``.
    """
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        # connect_timeout belongs to the caller-built request here.
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    request = httpclient._RequestProxy(request,
                                       httpclient.HTTPRequest._DEFAULTS)
    conn = WebSocketClientConnection(io_loop, request,
                                     on_message_callback=on_message_callback,
                                     compression_options=compression_options)
    if callback is not None:
        io_loop.add_future(conn.connect_future, callback)
    return conn.connect_future
def clear(self):
    """Resets all headers and content for this response."""
    defaults = {
        "Server": "Durotar/%s" % durotar.version,
        "Content-Type": "text/html; charset=UTF-8",
        "Date": httputil.format_timestamp(time.time()),
    }
    self._headers = httputil.HTTPHeaders(defaults)
    self.set_default_headers()
    self._write_buffer = []
    self._status_code = 200
    self._reason = httputil.responses[200]
def connect(self, **kwargs):
    """Opens a connection to the websocket url.

    Raises ``RuntimeError`` when a connection is already open.
    """
    if self._ws_connection is not None:
        raise RuntimeError("Websocket connection already open.")
    req = httpclient.HTTPRequest(
        url=self._url,
        headers=httputil.HTTPHeaders({"Content-Type": "application/json"}),
        connect_timeout=self._connect_timeout,
        request_timeout=self._request_timeout)
    future = websocket.websocket_connect(req)
    future.add_done_callback(self._connect_callback)
def handle_response(self, response: Any) -> None:
    """Copy the proxied response (status, headers, body) onto this
    handler and finish the request."""
    is_http_error = isinstance(response.error, httpclient.HTTPError)
    if response.error and not is_http_error:
        self.set_status(500)
        self.write('Internal server error:\n' + str(response.error))
    else:
        self.set_status(response.code, response.reason)
        # drop tornado's default headers before copying upstream ones
        self._headers = httputil.HTTPHeaders()
        for name, value in response.headers.get_all():
            # some header appear multiple times, eg 'Set-Cookie'
            self.add_header(name, value)
        if response.body:
            self.write(response.body)
    self.finish()
async def authenticate(self, handler, data):
    """Authenticate with SSH Auth API, and return the private key if
    login is successful.

    Return None otherwise.

    :param handler: the login handler (may be None); used only for the
        client IP in log messages.
    :param data: dict with 'username' and 'password' keys.
    """
    username = data['username'].lower()
    pwd = data['password']
    try:
        request = httpclient.AsyncHTTPClient()
        if self.skey != '':
            # An skey is configured: send it as a JSON body.
            headers = httputil.HTTPHeaders(
                {'content-type': 'application/json'})
            body = json.dumps({'skey': self.skey})
            resp = await request.fetch(self.server,
                                       raise_error=False,
                                       method='POST',
                                       headers=headers,
                                       auth_username=username,
                                       auth_password=pwd,
                                       body=body)
        else:
            resp = await request.fetch(self.server,
                                       raise_error=False,
                                       method='POST',
                                       headers=None,
                                       auth_username=username,
                                       auth_password=pwd)
        if resp.code == 200:
            file = Path(self.cert_path)/f'{username}.key'
            self._write_key(file, resp.body.decode(self.encoding))
        else:
            message = (
                f'SSH Auth API Authentication failed for'
                f' {username}@{handler.request.remote_ip}'
                f' with error {resp.code}: "{resp.reason}"'
            )
            self.log.warning(message)
            return None
    except Exception:
        # Bug fix: the original bare `except:` also swallowed
        # SystemExit / KeyboardInterrupt / CancelledError and hid the
        # traceback. Catch Exception only, and log with exc_info so the
        # real cause is visible.
        message = f'SSH Auth API Authentication failed for user "{username}"'
        if handler is not None:
            message = (
                f'SSH Auth API Authentication failed for'
                f' {username}@{handler.request.remote_ip}'
            )
        self.log.warning(message, exc_info=True)
        return None
    else:
        return username
def __init__(self, environ):
    """Parses the given WSGI environment to construct the request."""
    self.method = environ["REQUEST_METHOD"]
    # SCRIPT_NAME + PATH_INFO together form the full request path.
    self.path = urllib_parse.quote(
        from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    self.path += urllib_parse.quote(
        from_wsgi_str(environ.get("PATH_INFO", "")))
    self.uri = self.path
    self.arguments = {}
    self.query_arguments = {}
    self.body_arguments = {}
    self.query = environ.get("QUERY_STRING", "")
    if self.query:
        self.uri += "?" + self.query
        self.arguments = parse_qs_bytes(native_str(self.query),
                                        keep_blank_values=True)
        # Keep a separate copy so later body-argument merging does not
        # alter the query-only view.
        self.query_arguments = copy.deepcopy(self.arguments)
    self.version = "HTTP/1.1"
    self.headers = httputil.HTTPHeaders()
    # CONTENT_TYPE / CONTENT_LENGTH are special-cased by the WSGI spec
    # (they do not carry the HTTP_ prefix).
    if environ.get("CONTENT_TYPE"):
        self.headers["Content-Type"] = environ["CONTENT_TYPE"]
    if environ.get("CONTENT_LENGTH"):
        self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
    for key in environ:
        if key.startswith("HTTP_"):
            self.headers[key[5:].replace("_", "-")] = environ[key]
    if self.headers.get("Content-Length"):
        self.body = environ["wsgi.input"].read(
            int(self.headers["Content-Length"]))
    else:
        self.body = ""
    self.protocol = environ["wsgi.url_scheme"]
    self.remote_ip = environ.get("REMOTE_ADDR", "")
    if environ.get("HTTP_HOST"):
        self.host = environ["HTTP_HOST"]
    else:
        self.host = environ["SERVER_NAME"]

    # Parse request body
    self.files = {}
    httputil.parse_body_arguments(self.headers.get("Content-Type", ""),
                                  self.body, self.body_arguments,
                                  self.files)
    # Merge body arguments into the combined arguments dict.
    for k, v in self.body_arguments.items():
        self.arguments.setdefault(k, []).extend(v)

    self._start_time = time.time()
    self._finish_time = None
def __init__(self, method, uri, version="HTTP/1.0", headers=None,
             body=None, remote_ip=None, protocol=None, host=None,
             files=None, connection=None, arguments=None,
             idents=None, msg_id=None, stream=None):
    """Construct a request that was relayed over a ZeroMQ socket.

    Unlike the socket-based base class, remote_ip/protocol/host and the
    query arguments arrive pre-parsed from the sending side.
    """
    # ZMQWEB NOTE: This method is copied from the base class to make a
    # number of changes. We have added the arguments, ident, msg_id and
    # stream kwargs.
    self.method = method
    self.uri = uri
    self.version = version
    self.headers = headers or httputil.HTTPHeaders()
    self.body = body or ""
    # ZMQWEB NOTE: We simply copy the remote_ip, protocol and host as they
    # have been parsed by the other side.
    self.remote_ip = remote_ip
    self.protocol = protocol
    self.host = host
    self.files = files or {}
    # ZMQWEB NOTE: The connection attribute MUST not be saved in the
    # instance. This is because its precense triggers logic in the base
    # class that doesn't apply because ZeroMQ sockets are connectionless.
    self._start_time = time.time()
    self._finish_time = None
    # ZMQWEB NOTE: Attributes we have added to ZMQHTTPRequest.
    self.idents = idents
    self.msg_id = msg_id
    self.stream = stream
    self._chunks = []
    self._write_callback = None
    scheme, netloc, path, query, fragment = urlparse.urlsplit(
        native_str(uri))
    self.path = path
    self.query = query
    # ZMQWEB NOTE: We let the other side parse the arguments and simply
    # pass them into this class.
    self.arguments = arguments
def proxy(self, port, proxied_path):
    '''
    While self.request.uri is
        (hub)    /user/username/proxy/([0-9]+)/something.
        (single) /proxy/([0-9]+)/something
    This serverextension is given {port}/{everything/after}.
    '''
    # Hop-by-hop header must not be forwarded upstream.
    if 'Proxy-Connection' in self.request.headers:
        del self.request.headers['Proxy-Connection']

    body = self.request.body
    if not body:
        body = None

    client_uri = '{uri}:{port}{path}'.format(uri=self.proxy_uri,
                                             port=port,
                                             path=proxied_path)
    if self.request.query:
        client_uri += '?' + self.request.query

    client = httpclient.AsyncHTTPClient()

    # Forward the request verbatim; redirects are passed back to the
    # browser rather than followed here.
    req = httpclient.HTTPRequest(client_uri,
                                 method=self.request.method,
                                 body=body,
                                 headers=self.request.headers,
                                 follow_redirects=False)

    response = yield client.fetch(req, raise_error=False)

    # For all non http errors...
    if response.error and type(response.error) is not httpclient.HTTPError:
        self.set_status(500)
        self.write(str(response.error))
    else:
        self.set_status(response.code, response.reason)

        # clear tornado default header
        self._headers = httputil.HTTPHeaders()

        for header, v in response.headers.get_all():
            if header not in ('Content-Length', 'Transfer-Encoding',
                              'Content-Encoding', 'Connection'):
                # some header appear multiple times, eg 'Set-Cookie'
                self.add_header(header, v)

        if response.body:
            self.write(response.body)