def prepare_response(self, *, cookies=None, data=b'', err=None, headers=None, status=200):
    """Build a mocked ``aiocouchdb.client.HttpResponse`` for tests.

    :param cookies: cookie mapping exposed on the response (default ``{}``).
    :param data: response body; either ``bytes`` or a list of ``bytes``
        chunks that are fed out one per ``read``/``readany`` call.
    :param err: exception to raise from the content reads instead of data.
    :param headers: extra headers; ``CONTENT-TYPE`` defaults to
        ``application/json``.
    :param status: HTTP status code to report.
    :returns: the mocked response object (``close`` is wrapped in a Mock
        so calls can be asserted while still closing for real).
    """
    def make_side_effect(queue):
        # Returns a side-effect function that pops one queued chunk per
        # call.  NOTE: it closes over `resp` (defined below) and `err`
        # via late binding, so it must only run after `resp` exists.
        def side_effect(*args, **kwargs):
            # NOTE(review): Future(loop=...) is the legacy asyncio API
            # this aiohttp era used — removed in Python 3.10.
            fut = asyncio.Future(loop=self.loop)
            if queue:
                # Still data left: not at EOF, hand out the next chunk.
                resp.content.at_eof.return_value = False
                fut.set_result(queue.popleft())
            elif err:
                # Queue drained and an error was requested: fail the read.
                fut.set_exception(err)
            else:
                # Queue drained: signal EOF with an empty chunk.
                resp.content.at_eof.return_value = True
                fut.set_result(b'')
            return fut
        return side_effect
    headers = CIMultiDict(headers or {})
    headers.setdefault('CONTENT-TYPE', 'application/json')
    cookies = cookies or {}
    if isinstance(data, list):
        # Chunked body: reads yield the chunks as given, while readline
        # yields the joined body re-split on line boundaries.
        chunks_queue = deque(data)
        lines_queue = deque((b''.join(data)).splitlines(keepends=True))
    else:
        chunks_queue = deque([data])
        lines_queue = deque(data.splitlines(keepends=True))
    resp = aiocouchdb.client.HttpResponse(
        'get', URL('http://localhost/whatever'),
        request_info=mock.Mock(),
        writer=mock.Mock(),
        continue100=None,
        timer=TimerNoop(),
        traces=[],
        loop=self.loop,
        session=mock.Mock(),
    )
    resp.status = status
    resp._headers = headers
    resp.cookies = cookies
    resp.content = unittest.mock.Mock()
    resp.content._buffer = bytearray()
    resp.content.at_eof.return_value = False
    # read/readany share the chunk queue; readline draws from the
    # line-split view of the same body.
    resp.content.read.side_effect = make_side_effect(chunks_queue)
    resp.content.readany.side_effect = make_side_effect(chunks_queue)
    resp.content.readline.side_effect = make_side_effect(lines_queue)
    # Wrap close so tests can assert it was called; the real close still runs.
    resp.close = mock.Mock(side_effect=resp.close)
    return resp
def from_raw_headers(cls, raw_headers):
    """Parse raw (encoded) SIP start line and headers.

    Returns a ``Response`` or ``Request`` depending on which start-line
    pattern matches; raises ``ValueError`` when neither does.
    """
    lines = raw_headers.decode().split(utils.EOL)
    first_line = lines[0]

    headers = CIMultiDict()
    for header_line in lines[1:]:
        name, value = header_line.split(': ', 1)
        if name not in headers:
            headers[name] = value
            continue
        # Repeated header: collapse every value into a single list
        # stored under the one key.
        current = headers[name]
        if isinstance(current, list):
            current.append(value)
            headers[name] = current
        else:
            headers[name] = [current, value]

    response_match = FIRST_LINE_PATTERN['response']['regex'].match(first_line)
    if response_match:
        fields = response_match.groupdict()
        return Response(status_code=int(fields['status_code']),
                        status_message=fields['status_message'],
                        headers=headers,
                        first_line=first_line)

    request_match = FIRST_LINE_PATTERN['request']['regex'].match(first_line)
    if request_match:
        fields = request_match.groupdict()
        # CSeq is "<number> <method>"; only the number is kept here.
        cseq, _ = headers['CSeq'].split()
        return Request(method=fields['method'],
                       headers=headers,
                       cseq=int(cseq),
                       first_line=first_line)

    LOG.debug(lines)
    raise ValueError('Not a SIP message')
def from_raw_headers(cls, raw_headers):
    """Parse raw (encoded) SIP start line and headers into a message object.

    Returns a ``Response`` when the first line matches the response
    pattern, a ``Request`` when it matches the request pattern, and
    raises ``ValueError`` otherwise.
    """
    headers = CIMultiDict()
    decoded_headers = raw_headers.decode().split(utils.EOL)
    # First element is the start line; the rest are "Name: value" headers.
    for line in decoded_headers[1:]:
        k, v = line.split(': ', 1)
        if k in headers:
            # Repeated header: fold all values into one list under the key.
            o = headers.setdefault(k, [])
            if not isinstance(o, list):
                o = [o]
            o.append(v)
            headers[k] = o
        else:
            headers[k] = v
    m = FIRST_LINE_PATTERN['response']['regex'].match(decoded_headers[0])
    if m:
        d = m.groupdict()
        return Response(status_code=int(d['status_code']),
                        status_message=d['status_message'],
                        headers=headers,
                        first_line=decoded_headers[0])
    else:
        m = FIRST_LINE_PATTERN['request']['regex'].match(decoded_headers[0])
        if m:
            d = m.groupdict()
            # CSeq header is "<number> <method>"; keep only the number.
            cseq, _ = headers['CSeq'].split()
            return Request(method=d['method'],
                           headers=headers,
                           cseq=int(cseq),
                           first_line=decoded_headers[0])
        else:
            LOG.debug(decoded_headers)
            raise ValueError('Not a SIP message')
def from_raw_message(cls, raw_message):
    """Parse a complete decoded SIP message: start line, headers, payload.

    Headers end at the first line that does not contain ``': '``
    (normally the blank separator line); everything after it is payload.
    Returns a ``Response`` or ``Request`` depending on the start line,
    with ``payload`` set to ``None`` when the body is empty.
    Raises ``ValueError`` when the start line matches neither pattern.
    """
    lines = raw_message.split(utils.EOL)
    first_line = lines.pop(0)
    headers = CIMultiDict()
    payload_lines = []
    reading_headers = True
    for line in lines:
        if reading_headers:
            if ': ' in line:
                k, v = line.split(': ', 1)
                if k in headers:
                    # Repeated header: fold values into one list.
                    o = headers.setdefault(k, [])
                    if not isinstance(o, list):
                        o = [o]
                    o.append(v)
                    headers[k] = o
                else:
                    headers[k] = v
            else:
                # Header section is over; what follows is the body.
                reading_headers = False
        else:
            # @todo: use content length to read payload
            payload_lines.append(line)
    # BUGFIX: rejoin payload lines with EOL.  The previous
    # ``payload += line`` concatenation dropped the separators that
    # split() had stripped, corrupting any multi-line body (e.g. SDP).
    payload = utils.EOL.join(payload_lines)
    if payload == '':
        payload = None
    m = FIRST_LINE_PATTERN['response']['regex'].match(first_line)
    if m:
        d = m.groupdict()
        return Response(status_code=int(d['status_code']),
                        status_message=d['status_message'],
                        headers=headers,
                        payload=payload)
    m = FIRST_LINE_PATTERN['request']['regex'].match(first_line)
    if m:
        d = m.groupdict()
        return Request(method=d['method'],
                       headers=headers,
                       payload=payload)
    raise ValueError('Not a SIP message')
async def _ws_connect(
        self,
        url: StrOrURL, *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024) -> ClientWebSocketResponse:
    """Open a WebSocket connection: send the upgrade request, validate
    the server handshake, and wrap the connection in the session's
    websocket response class.

    Raises ``WSServerHandshakeError`` when the server's status, upgrade/
    connection headers, or Sec-WebSocket-Accept challenge are invalid.
    """
    if headers is None:
        real_headers = CIMultiDict()  # type: CIMultiDict[str]
    else:
        real_headers = CIMultiDict(headers)

    # Mandatory upgrade headers; user-supplied values take precedence.
    default_headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_VERSION: '13',
    }

    for key, value in default_headers.items():
        real_headers.setdefault(key, value)

    # Random nonce the server must echo back hashed with WS_KEY.
    sec_key = base64.b64encode(os.urandom(16))
    real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

    if protocols:
        real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols)
    if origin is not None:
        real_headers[hdrs.ORIGIN] = origin
    if compress:
        extstr = ws_ext_gen(compress=compress)
        real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

    # Collapse the deprecated verify_ssl/ssl_context/fingerprint
    # parameters into the single `ssl` argument.
    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    # send request
    resp = await self.request(method, url,
                              headers=real_headers,
                              read_until_eof=False,
                              auth=auth,
                              proxy=proxy,
                              proxy_auth=proxy_auth,
                              ssl=ssl,
                              proxy_headers=proxy_headers)

    try:
        # check handshake
        if resp.status != 101:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid response status',
                status=resp.status,
                headers=resp.headers)

        if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket':
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid upgrade header',
                status=resp.status,
                headers=resp.headers)

        if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade':
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid connection header',
                status=resp.status,
                headers=resp.headers)

        # key calculation: server must return b64(sha1(nonce + WS_KEY)).
        key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '')
        match = base64.b64encode(
            hashlib.sha1(sec_key + WS_KEY).digest()).decode()
        if key != match:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid challenge response',
                status=resp.status,
                headers=resp.headers)

        # websocket protocol: first server-offered protocol we also
        # requested wins.
        protocol = None
        if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
            resp_protocols = [
                proto.strip() for proto in
                resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')
            ]
            for proto in resp_protocols:
                if proto in protocols:
                    protocol = proto
                    break

        # websocket compress: only honoured if the server echoed the
        # extensions header; a bad extension string fails the handshake.
        notakeover = False
        if compress:
            compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            if compress_hdrs:
                try:
                    compress, notakeover = ws_ext_parse(compress_hdrs)
                except WSHandshakeError as exc:
                    raise WSServerHandshakeError(
                        resp.request_info,
                        resp.history,
                        message=exc.args[0],
                        status=resp.status,
                        headers=resp.headers)
            else:
                compress = 0
                notakeover = False

        # Take over the raw connection for websocket framing.
        conn = resp.connection
        assert conn is not None
        proto = conn.protocol
        assert proto is not None
        transport = conn.transport
        assert transport is not None
        reader = FlowControlDataQueue(
            proto, limit=2**16, loop=self._loop
        )  # type: FlowControlDataQueue[WSMessage]  # noqa
        proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
        # Client frames are always masked per the websocket protocol.
        writer = WebSocketWriter(proto, transport, use_mask=True,
                                 compress=compress, notakeover=notakeover)
    except BaseException:
        # Any failure after the HTTP exchange must release the response.
        resp.close()
        raise
    else:
        return self._ws_response_class(reader, writer, protocol,
                                       resp, timeout, autoclose,
                                       autoping, self._loop,
                                       receive_timeout=receive_timeout,
                                       heartbeat=heartbeat,
                                       compress=compress,
                                       client_notakeover=notakeover)
async def _ws_connect(
    self,
    url: StrOrURL,
    *,
    method: str = hdrs.METH_GET,
    protocols: Iterable[str] = (),
    timeout: Union[ClientWSTimeout, float, _SENTINEL] = sentinel,
    receive_timeout: Optional[float] = None,
    autoclose: bool = True,
    autoping: bool = True,
    heartbeat: Optional[float] = None,
    auth: Optional[BasicAuth] = None,
    origin: Optional[str] = None,
    params: Optional[Mapping[str, str]] = None,
    headers: Optional[LooseHeaders] = None,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    ssl: Union[SSLContext, bool, None, Fingerprint] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    compress: int = 0,
    max_msg_size: int = 4 * 1024 * 1024,
) -> ClientWebSocketResponse:
    """Open a WebSocket connection: normalise timeout parameters, send
    the upgrade request, validate the server handshake, and wrap the
    connection in the session's websocket response class.

    Raises ``WSServerHandshakeError`` on any invalid handshake detail
    and ``TypeError`` for an unsupported ``ssl`` argument.
    """
    # Normalise the deprecated float `timeout` / `receive_timeout`
    # parameters into a single ClientWSTimeout value.
    if timeout is not sentinel:
        if isinstance(timeout, ClientWSTimeout):
            ws_timeout = timeout
        else:
            warnings.warn(
                "parameter 'timeout' of type 'float' "
                "is deprecated, please use "
                "'timeout=ClientWSTimeout(ws_close=...)'",
                DeprecationWarning,
                stacklevel=2,
            )
            ws_timeout = ClientWSTimeout(ws_close=timeout)  # type: ignore[arg-type]
    else:
        ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
    if receive_timeout is not None:
        warnings.warn(
            "float parameter 'receive_timeout' "
            "is deprecated, please use parameter "
            "'timeout=ClientWSTimeout(ws_receive=...)'",
            DeprecationWarning,
            stacklevel=2,
        )
        ws_timeout = dataclasses.replace(ws_timeout, ws_receive=receive_timeout)

    if headers is None:
        real_headers = CIMultiDict()  # type: CIMultiDict[str]
    else:
        real_headers = CIMultiDict(headers)

    # Mandatory upgrade headers; user-supplied values take precedence.
    default_headers = {
        hdrs.UPGRADE: "websocket",
        hdrs.CONNECTION: "upgrade",
        hdrs.SEC_WEBSOCKET_VERSION: "13",
    }

    for key, value in default_headers.items():
        real_headers.setdefault(key, value)

    # Random nonce the server must echo back hashed with WS_KEY.
    sec_key = base64.b64encode(os.urandom(16))
    real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

    if protocols:
        real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
    if origin is not None:
        real_headers[hdrs.ORIGIN] = origin
    if compress:
        extstr = ws_ext_gen(compress=compress)
        real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint, "
            "or None, got {!r} instead.".format(ssl)
        )

    # send request
    resp = await self.request(
        method,
        url,
        params=params,
        headers=real_headers,
        read_until_eof=False,
        auth=auth,
        proxy=proxy,
        proxy_auth=proxy_auth,
        ssl=ssl,
        proxy_headers=proxy_headers,
    )

    try:
        # check handshake
        if resp.status != 101:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid response status",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid upgrade header",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid connection header",
                status=resp.status,
                headers=resp.headers,
            )

        # key calculation: server must return b64(sha1(nonce + WS_KEY)).
        r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
        match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
        if r_key != match:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid challenge response",
                status=resp.status,
                headers=resp.headers,
            )

        # websocket protocol: first server-offered protocol we also
        # requested wins.
        protocol = None
        if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
            resp_protocols = [
                proto.strip()
                for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]

            for proto in resp_protocols:
                if proto in protocols:
                    protocol = proto
                    break

        # websocket compress: only honoured if the server echoed the
        # extensions header; a bad extension string fails the handshake.
        notakeover = False
        if compress:
            compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            if compress_hdrs:
                try:
                    compress, notakeover = ws_ext_parse(compress_hdrs)
                except WSHandshakeError as exc:
                    raise WSServerHandshakeError(
                        resp.request_info,
                        resp.history,
                        message=exc.args[0],
                        status=resp.status,
                        headers=resp.headers,
                    ) from exc
            else:
                compress = 0
                notakeover = False

        # Take over the raw connection for websocket framing.
        conn = resp.connection
        assert conn is not None
        conn_proto = conn.protocol
        assert conn_proto is not None
        transport = conn.transport
        assert transport is not None
        reader = FlowControlDataQueue(
            conn_proto, 2 ** 16, loop=self._loop
        )  # type: FlowControlDataQueue[WSMessage]
        conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
        # Client frames are always masked per the websocket protocol.
        writer = WebSocketWriter(
            conn_proto,
            transport,
            use_mask=True,
            compress=compress,
            notakeover=notakeover,
        )
    except BaseException:
        # Any failure after the HTTP exchange must release the response.
        resp.close()
        raise
    else:
        return self._ws_response_class(
            reader,
            writer,
            protocol,
            resp,
            ws_timeout,
            autoclose,
            autoping,
            self._loop,
            heartbeat=heartbeat,
            compress=compress,
            client_notakeover=notakeover,
        )
class StreamResponse(HeadersMixin):
    """Base class for an HTTP response that is streamed to the client.

    Holds status/reason, headers, cookies, and the chunking/compression
    configuration until ``prepare()``/``start()`` creates the low-level
    ``ResponseImpl`` that actually writes to the transport.
    """

    def __init__(self, *, status=200, reason=None, headers=None):
        self._body = None
        self._keep_alive = None
        self._chunked = False
        self._chunk_size = None
        self._compression = False
        self._compression_force = False
        self._headers = CIMultiDict()
        self._cookies = SimpleCookie()
        self.set_status(status, reason)
        self._req = None
        self._resp_impl = None
        self._eof_sent = False
        self._task = None

        if headers is not None:
            # TODO: optimize CIMultiDict extending
            self._headers.extend(headers)
        self._headers.setdefault(hdrs.CONTENT_TYPE,
                                 'application/octet-stream')

    @property
    def prepared(self):
        """True once the low-level response impl has been created."""
        return self._resp_impl is not None

    @property
    def started(self):
        warnings.warn('use Response.prepared instead', DeprecationWarning)
        return self.prepared

    @property
    def task(self):
        return self._task

    @property
    def status(self):
        return self._status

    @property
    def chunked(self):
        return self._chunked

    @property
    def compression(self):
        return self._compression

    @property
    def reason(self):
        return self._reason

    def set_status(self, status, reason=None):
        """Set the status code; derive the reason phrase when omitted."""
        self._status = int(status)
        if reason is None:
            reason = ResponseImpl.calc_reason(status)
        self._reason = reason

    @property
    def keep_alive(self):
        return self._keep_alive

    def force_close(self):
        """Disable keep-alive for this response."""
        self._keep_alive = False

    @property
    def body_length(self):
        return self._resp_impl.body_length

    @property
    def output_length(self):
        # BUGFIX: was `self._resp.impl.output_length`, which raised
        # AttributeError — the attribute is `_resp_impl`.
        return self._resp_impl.output_length

    def enable_chunked_encoding(self, chunk_size=None):
        """Enables automatic chunked transfer encoding."""
        self._chunked = True
        self._chunk_size = chunk_size

    def enable_compression(self, force=None):
        """Enables response compression encoding."""
        # Backwards compatibility for when force was a bool <0.17.
        if type(force) == bool:
            force = ContentCoding.deflate if force else ContentCoding.identity
        elif force is not None:
            # BUGFIX: message previously named the nonexistent type
            # "ContentEncoding"; the accepted enum is ContentCoding.
            assert isinstance(force, ContentCoding), ("force should be one of "
                                                      "None, bool or "
                                                      "ContentCoding")
        self._compression = True
        self._compression_force = force

    @property
    def headers(self):
        return self._headers

    @property
    def cookies(self):
        return self._cookies

    def set_cookie(self, name, value, *, expires=None,
                   domain=None, max_age=None, path='/',
                   secure=None, httponly=None, version=None):
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == '':
            # deleted cookie
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c['expires'] = expires
        elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
            # Re-setting a previously deleted cookie: drop the epoch expiry.
            del c['expires']

        if domain is not None:
            c['domain'] = domain

        if max_age is not None:
            c['max-age'] = max_age
        elif 'max-age' in c:
            del c['max-age']

        c['path'] = path

        if secure is not None:
            c['secure'] = secure
        if httponly is not None:
            c['httponly'] = httponly
        if version is not None:
            c['version'] = version

    def del_cookie(self, name, *, domain=None, path='/'):
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        self._cookies.pop(name, None)
        self.set_cookie(name, '', max_age=0,
                        expires="Thu, 01 Jan 1970 00:00:00 GMT",
                        domain=domain, path=path)

    @property
    def content_length(self):
        # Just a placeholder for adding setter
        return super().content_length

    @content_length.setter
    def content_length(self, value):
        if value is not None:
            value = int(value)
            # TODO: raise error if chunked enabled
            self.headers[hdrs.CONTENT_LENGTH] = str(value)
        else:
            self.headers.pop(hdrs.CONTENT_LENGTH, None)

    @property
    def content_type(self):
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value):
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()

    @property
    def charset(self):
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value):
        ctype = self.content_type  # read header values if needed
        if ctype == 'application/octet-stream':
            raise RuntimeError("Setting charset for application/octet-stream "
                               "doesn't make sense, setup content_type first")
        if value is None:
            self._content_dict.pop('charset', None)
        else:
            self._content_dict['charset'] = str(value).lower()
        self._generate_content_type_header()

    @property
    def last_modified(self, _LAST_MODIFIED=hdrs.LAST_MODIFIED):
        """The value of Last-Modified HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        httpdate = self.headers.get(_LAST_MODIFIED)
        if httpdate is not None:
            timetuple = parsedate(httpdate)
            if timetuple is not None:
                return datetime.datetime(*timetuple[:6],
                                         tzinfo=datetime.timezone.utc)
        return None

    @last_modified.setter
    def last_modified(self, value):
        # Accepts None (drop header), a POSIX timestamp, a datetime,
        # or a preformatted string.
        if value is None:
            self.headers.pop(hdrs.LAST_MODIFIED, None)
        elif isinstance(value, (int, float)):
            self.headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)))
        elif isinstance(value, datetime.datetime):
            self.headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple())
        elif isinstance(value, str):
            self.headers[hdrs.LAST_MODIFIED] = value

    @property
    def tcp_nodelay(self):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot get tcp_nodelay for "
                               "not prepared response")
        return resp_impl.transport.tcp_nodelay

    def set_tcp_nodelay(self, value):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot set tcp_nodelay for "
                               "not prepared response")
        resp_impl.transport.set_tcp_nodelay(value)

    @property
    def tcp_cork(self):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot get tcp_cork for "
                               "not prepared response")
        return resp_impl.transport.tcp_cork

    def set_tcp_cork(self, value):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot set tcp_cork for "
                               "not prepared response")
        resp_impl.transport.set_tcp_cork(value)

    def _generate_content_type_header(self, CONTENT_TYPE=hdrs.CONTENT_TYPE):
        # Rebuild the Content-Type header from the cached type + params.
        params = '; '.join("%s=%s" % i for i in self._content_dict.items())
        if params:
            ctype = self._content_type + '; ' + params
        else:
            ctype = self._content_type
        self.headers[CONTENT_TYPE] = ctype

    def _start_pre_check(self, request):
        # Guard against double-start: return the existing impl for the
        # same request, error out for a different one.
        if self._resp_impl is not None:
            if self._req is not request:
                raise RuntimeError(
                    "Response has been started with different request.")
            else:
                return self._resp_impl
        else:
            return None

    def _do_start_compression(self, coding):
        if coding != ContentCoding.identity:
            self.headers[hdrs.CONTENT_ENCODING] = coding.value
            self._resp_impl.add_compression_filter(coding.value)
            # Content length is unknown once the body is compressed.
            self.content_length = None

    def _start_compression(self, request):
        if self._compression_force:
            self._do_start_compression(self._compression_force)
        else:
            # Negotiate: use the first coding the client accepts.
            accept_encoding = request.headers.get(
                hdrs.ACCEPT_ENCODING, '').lower()
            for coding in ContentCoding:
                if coding.value in accept_encoding:
                    self._do_start_compression(coding)
                    return

    def start(self, request):
        warnings.warn('use .prepare(request) instead', DeprecationWarning)
        resp_impl = self._start_pre_check(request)
        if resp_impl is not None:
            return resp_impl

        return self._start(request)

    @asyncio.coroutine
    def prepare(self, request):
        """Create the low-level response impl and send the headers."""
        resp_impl = self._start_pre_check(request)
        if resp_impl is not None:
            return resp_impl
        yield from request._prepare_hook(self)

        return self._start(request)

    def _start(self, request,
               HttpVersion10=HttpVersion10,
               HttpVersion11=HttpVersion11,
               CONNECTION=hdrs.CONNECTION,
               DATE=hdrs.DATE,
               SERVER=hdrs.SERVER,
               SET_COOKIE=hdrs.SET_COOKIE,
               TRANSFER_ENCODING=hdrs.TRANSFER_ENCODING):
        self._req = request
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive
        version = request.version

        resp_impl = self._resp_impl = ResponseImpl(
            request._writer,
            self._status,
            version,
            not keep_alive,
            self._reason)

        headers = self.headers
        for cookie in self._cookies.values():
            # Strip the leading separator from SimpleCookie's output.
            value = cookie.output(header='')[1:]
            headers.add(SET_COOKIE, value)

        if self._compression:
            self._start_compression(request)

        if self._chunked:
            if request.version != HttpVersion11:
                raise RuntimeError("Using chunked encoding is forbidden "
                                   "for HTTP/{0.major}.{0.minor}".format(
                                       request.version))
            resp_impl.chunked = True
            if self._chunk_size:
                resp_impl.add_chunking_filter(self._chunk_size)
            headers[TRANSFER_ENCODING] = 'chunked'
        else:
            resp_impl.length = self.content_length

        headers.setdefault(DATE, request._time_service.strtime())
        headers.setdefault(SERVER, resp_impl.SERVER_SOFTWARE)
        if CONNECTION not in headers:
            # Only emit Connection when it differs from the version default.
            if keep_alive:
                if version == HttpVersion10:
                    headers[CONNECTION] = 'keep-alive'
            else:
                if version == HttpVersion11:
                    headers[CONNECTION] = 'close'

        resp_impl.headers = headers

        self._send_headers(resp_impl)

        self._task = request._task
        return resp_impl

    def _send_headers(self, resp_impl):
        # Durty hack required for
        # https://github.com/KeepSafe/aiohttp/issues/1093
        # File sender may override it
        resp_impl.send_headers()

    def write(self, data):
        """Write a chunk of the body; requires the response be started."""
        assert isinstance(data, (bytes, bytearray, memoryview)), \
            "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._resp_impl is None:
            raise RuntimeError("Cannot call write() before start()")

        if data:
            return self._resp_impl.write(data)
        else:
            return ()

    @asyncio.coroutine
    def drain(self):
        if self._resp_impl is None:
            raise RuntimeError("Response has not been started")
        yield from self._resp_impl.transport.drain()

    @asyncio.coroutine
    def write_eof(self):
        # Idempotent: a second call is a no-op.
        if self._eof_sent:
            return
        if self._resp_impl is None:
            raise RuntimeError("Response has not been started")

        yield from self._resp_impl.write_eof()
        self._eof_sent = True

    def __repr__(self):
        if self.started:
            info = "{} {} ".format(self._req.method, self._req.path)
        else:
            info = "not started"
        return "<{} {} {}>".format(self.__class__.__name__,
                                   self.reason, info)
async def _ws_connect(
        self,
        url: StrOrURL, *,
        method: str=hdrs.METH_GET,
        protocols: Iterable[str]=(),
        timeout: float=10.0,
        receive_timeout: Optional[float]=None,
        autoclose: bool=True,
        autoping: bool=True,
        heartbeat: Optional[float]=None,
        auth: Optional[BasicAuth]=None,
        origin: Optional[str]=None,
        headers: Optional[LooseHeaders]=None,
        proxy: Optional[StrOrURL]=None,
        proxy_auth: Optional[BasicAuth]=None,
        ssl: Union[SSLContext, bool, None, Fingerprint]=None,
        proxy_headers: Optional[LooseHeaders]=None,
        compress: int=0,
        max_msg_size: int=4*1024*1024
) -> ClientWebSocketResponse:
    """Open a WebSocket connection: send the upgrade request, validate
    the server handshake, and wrap the connection in the session's
    websocket response class.

    Raises ``WSServerHandshakeError`` on any invalid handshake detail
    and ``TypeError`` for an unsupported ``ssl`` argument.
    """
    if headers is None:
        real_headers = CIMultiDict()  # type: CIMultiDict[str]
    else:
        real_headers = CIMultiDict(headers)

    # Mandatory upgrade headers; user-supplied values take precedence.
    default_headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_VERSION: '13',
    }

    for key, value in default_headers.items():
        real_headers.setdefault(key, value)

    # Random nonce the server must echo back hashed with WS_KEY.
    sec_key = base64.b64encode(os.urandom(16))
    real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

    if protocols:
        real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols)
    if origin is not None:
        real_headers[hdrs.ORIGIN] = origin
    if compress:
        extstr = ws_ext_gen(compress=compress)
        real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError("ssl should be SSLContext, bool, Fingerprint, "
                        "or None, got {!r} instead.".format(ssl))

    # send request
    resp = await self.request(method, url,
                              headers=real_headers,
                              read_until_eof=False,
                              auth=auth,
                              proxy=proxy,
                              proxy_auth=proxy_auth,
                              ssl=ssl,
                              proxy_headers=proxy_headers)

    try:
        # check handshake
        if resp.status != 101:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid response status',
                status=resp.status,
                headers=resp.headers)

        if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket':
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid upgrade header',
                status=resp.status,
                headers=resp.headers)

        if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade':
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid connection header',
                status=resp.status,
                headers=resp.headers)

        # key calculation: server must return b64(sha1(nonce + WS_KEY)).
        key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '')
        match = base64.b64encode(
            hashlib.sha1(sec_key + WS_KEY).digest()).decode()
        if key != match:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message='Invalid challenge response',
                status=resp.status,
                headers=resp.headers)

        # websocket protocol: first server-offered protocol we also
        # requested wins.
        protocol = None
        if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
            resp_protocols = [
                proto.strip() for proto in
                resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')]

            for proto in resp_protocols:
                if proto in protocols:
                    protocol = proto
                    break

        # websocket compress: only honoured if the server echoed the
        # extensions header; a bad extension string fails the handshake.
        notakeover = False
        if compress:
            compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            if compress_hdrs:
                try:
                    compress, notakeover = ws_ext_parse(compress_hdrs)
                except WSHandshakeError as exc:
                    raise WSServerHandshakeError(
                        resp.request_info,
                        resp.history,
                        message=exc.args[0],
                        status=resp.status,
                        headers=resp.headers)
            else:
                compress = 0
                notakeover = False

        # Take over the raw connection for websocket framing.
        conn = resp.connection
        assert conn is not None
        proto = conn.protocol
        assert proto is not None
        transport = conn.transport
        assert transport is not None
        reader = FlowControlDataQueue(
            proto, limit=2 ** 16,
            loop=self._loop)  # type: FlowControlDataQueue[WSMessage]  # noqa
        proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
        # Client frames are always masked per the websocket protocol.
        writer = WebSocketWriter(
            proto, transport, use_mask=True,
            compress=compress, notakeover=notakeover)
    except BaseException:
        # Any failure after the HTTP exchange must release the response.
        resp.close()
        raise
    else:
        return self._ws_response_class(reader, writer, protocol,
                                       resp, timeout, autoclose,
                                       autoping, self._loop,
                                       receive_timeout=receive_timeout,
                                       heartbeat=heartbeat,
                                       compress=compress,
                                       client_notakeover=notakeover)
class StreamResponse(HeadersMixin):
    """Base class for an HTTP response that is streamed to the client.

    Holds status/reason, headers, cookies, and compression/chunking
    configuration until ``prepare()``/``start()`` creates the low-level
    ``ResponseImpl`` and sends the headers.  After that point the response
    is "prepared" and most setters refuse further changes.
    """

    def __init__(self, *, status=200, reason=None, headers=None):
        # _body is unused here; kept for subclasses (e.g. a full Response).
        self._body = None
        # None means "inherit keep-alive from the request" at _start time.
        self._keep_alive = None
        self._chunked = False
        self._chunk_size = None
        self._compression = False
        self._compression_force = False
        self._headers = CIMultiDict()
        self._cookies = SimpleCookie()
        self._req = None
        # _resp_impl is the low-level writer; None until prepared.
        self._resp_impl = None
        self._eof_sent = False
        self._task = None

        if headers is not None:
            # TODO: optimize CIMultiDict extending
            self._headers.extend(headers)
        self._headers.setdefault(hdrs.CONTENT_TYPE,
                                 'application/octet-stream')

        self.set_status(status, reason)

    @property
    def prepared(self):
        """True once headers have been written (ResponseImpl exists)."""
        return self._resp_impl is not None

    @property
    def started(self):
        """Deprecated alias of :attr:`prepared`."""
        warnings.warn('use Response.prepared instead',
                      DeprecationWarning)
        return self.prepared

    @property
    def task(self):
        # The request-handling task, captured in _start().
        return self._task

    @property
    def status(self):
        return self._status

    @property
    def chunked(self):
        return self._chunked

    @property
    def compression(self):
        return self._compression

    @property
    def reason(self):
        return self._reason

    def set_status(self, status, reason=None):
        """Set status code and reason phrase; forbidden once prepared."""
        if self.prepared:
            raise RuntimeError("Cannot change the response status code after "
                               "the headers have been sent")
        self._status = int(status)
        if reason is None:
            # Derive the standard reason phrase for this status code.
            reason = ResponseImpl.calc_reason(status)
        self._reason = reason

    @property
    def keep_alive(self):
        return self._keep_alive

    def force_close(self):
        """Force the connection to close after this response."""
        self._keep_alive = False

    @property
    def body_length(self):
        # Bytes of payload written; only valid once prepared.
        return self._resp_impl.body_length

    @property
    def output_length(self):
        # Total bytes on the wire (headers + payload); only valid once
        # prepared.
        return self._resp_impl.output_length

    def enable_chunked_encoding(self, chunk_size=None):
        """Enables automatic chunked transfer encoding."""
        self._chunked = True
        self._chunk_size = chunk_size

    def enable_compression(self, force=None):
        """Enables response compression encoding."""
        # Backwards compatibility for when force was a bool <0.17.
        if type(force) == bool:
            force = ContentCoding.deflate if force else ContentCoding.identity
        elif force is not None:
            assert isinstance(force, ContentCoding), ("force should one of "
                                                      "None, bool or "
                                                      "ContentEncoding")

        self._compression = True
        self._compression_force = force

    @property
    def headers(self):
        return self._headers

    @property
    def cookies(self):
        return self._cookies

    def set_cookie(self, name, value, *, expires=None,
                   domain=None, max_age=None, path='/',
                   secure=None, httponly=None, version=None):
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == '':
            # deleted cookie: drop the expired placeholder left by
            # del_cookie() so the new value starts clean
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c['expires'] = expires
        elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
            # Reviving a deleted cookie: remove the deletion timestamp.
            del c['expires']

        if domain is not None:
            c['domain'] = domain

        if max_age is not None:
            c['max-age'] = max_age
        elif 'max-age' in c:
            del c['max-age']

        c['path'] = path

        if secure is not None:
            c['secure'] = secure
        if httponly is not None:
            c['httponly'] = httponly
        if version is not None:
            c['version'] = version

    def del_cookie(self, name, *, domain=None, path='/'):
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        self._cookies.pop(name, None)
        self.set_cookie(name, '', max_age=0,
                        expires="Thu, 01 Jan 1970 00:00:00 GMT",
                        domain=domain, path=path)

    @property
    def content_length(self):
        # Just a placeholder for adding setter
        return super().content_length

    @content_length.setter
    def content_length(self, value):
        if value is not None:
            value = int(value)
            # TODO: raise error if chunked enabled
            self.headers[hdrs.CONTENT_LENGTH] = str(value)
        else:
            # None removes the header entirely (e.g. when compressing).
            self.headers.pop(hdrs.CONTENT_LENGTH, None)

    @property
    def content_type(self):
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value):
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()

    @property
    def charset(self):
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value):
        ctype = self.content_type  # read header values if needed
        if ctype == 'application/octet-stream':
            raise RuntimeError("Setting charset for application/octet-stream "
                               "doesn't make sense, setup content_type first")
        if value is None:
            self._content_dict.pop('charset', None)
        else:
            self._content_dict['charset'] = str(value).lower()
        self._generate_content_type_header()

    # NOTE: the extra default parameter binds the header constant at class
    # definition time (same micro-optimization as _start below).
    @property
    def last_modified(self, _LAST_MODIFIED=hdrs.LAST_MODIFIED):
        """The value of Last-Modified HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        httpdate = self.headers.get(_LAST_MODIFIED)
        if httpdate is not None:
            timetuple = parsedate(httpdate)
            if timetuple is not None:
                return datetime.datetime(*timetuple[:6],
                                         tzinfo=datetime.timezone.utc)
        return None

    @last_modified.setter
    def last_modified(self, value):
        # Accepts None (removes header), a UNIX timestamp (int/float,
        # rounded up), an aware/naive datetime, or a preformatted string.
        if value is None:
            self.headers.pop(hdrs.LAST_MODIFIED, None)
        elif isinstance(value, (int, float)):
            self.headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)))
        elif isinstance(value, datetime.datetime):
            self.headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple())
        elif isinstance(value, str):
            self.headers[hdrs.LAST_MODIFIED] = value

    @property
    def tcp_nodelay(self):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot get tcp_nodelay for "
                               "not prepared response")
        return resp_impl.transport.tcp_nodelay

    def set_tcp_nodelay(self, value):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot set tcp_nodelay for "
                               "not prepared response")
        resp_impl.transport.set_tcp_nodelay(value)

    @property
    def tcp_cork(self):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot get tcp_cork for "
                               "not prepared response")
        return resp_impl.transport.tcp_cork

    def set_tcp_cork(self, value):
        resp_impl = self._resp_impl
        if resp_impl is None:
            raise RuntimeError("Cannot set tcp_cork for "
                               "not prepared response")
        resp_impl.transport.set_tcp_cork(value)

    def _generate_content_type_header(self, CONTENT_TYPE=hdrs.CONTENT_TYPE):
        # Rebuild the Content-Type header from the cached type and params
        # (e.g. charset) parsed by HeadersMixin.
        params = '; '.join("%s=%s" % i for i in self._content_dict.items())
        if params:
            ctype = self._content_type + '; ' + params
        else:
            ctype = self._content_type
        self.headers[CONTENT_TYPE] = ctype

    def _start_pre_check(self, request):
        # Returns the existing impl when already started for this same
        # request; None when not started; raises on a request mismatch.
        if self._resp_impl is not None:
            if self._req is not request:
                raise RuntimeError(
                    "Response has been started with different request.")
            else:
                return self._resp_impl
        else:
            return None

    def _do_start_compression(self, coding):
        # 'identity' means no encoding; anything else installs the
        # compression filter and drops Content-Length (size is unknown).
        if coding != ContentCoding.identity:
            self.headers[hdrs.CONTENT_ENCODING] = coding.value
            self._resp_impl.add_compression_filter(coding.value)
            self.content_length = None

    def _start_compression(self, request):
        if self._compression_force:
            self._do_start_compression(self._compression_force)
        else:
            # Negotiate: first of our codings found in Accept-Encoding wins.
            accept_encoding = request.headers.get(
                hdrs.ACCEPT_ENCODING, '').lower()
            for coding in ContentCoding:
                if coding.value in accept_encoding:
                    self._do_start_compression(coding)
                    return

    def start(self, request):
        """Deprecated synchronous variant of :meth:`prepare`."""
        warnings.warn('use .prepare(request) instead',
                      DeprecationWarning)
        resp_impl = self._start_pre_check(request)
        if resp_impl is not None:
            return resp_impl

        return self._start(request)

    @asyncio.coroutine
    def prepare(self, request):
        """Prepare the response: run hooks and send the headers.

        Idempotent for the same request; returns the low-level impl.
        """
        resp_impl = self._start_pre_check(request)
        if resp_impl is not None:
            return resp_impl
        # Let signal handlers observe/modify the response before headers go.
        yield from request._prepare_hook(self)

        return self._start(request)

    # Default-argument bindings below cache module-level constants as
    # locals for faster lookup; callers never pass them.
    def _start(self, request,
               HttpVersion10=HttpVersion10,
               HttpVersion11=HttpVersion11,
               CONNECTION=hdrs.CONNECTION,
               DATE=hdrs.DATE,
               SERVER=hdrs.SERVER,
               SET_COOKIE=hdrs.SET_COOKIE,
               TRANSFER_ENCODING=hdrs.TRANSFER_ENCODING):
        self._req = request
        keep_alive = self._keep_alive
        if keep_alive is None:
            # Not forced either way: follow the request's keep-alive.
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive
        version = request.version

        resp_impl = self._resp_impl = ResponseImpl(
            request._writer,
            self._status,
            version,
            not keep_alive,
            self._reason)

        headers = self.headers
        for cookie in self._cookies.values():
            # output(header='') yields "<space>name=value; ..."; strip the
            # leading space and emit one Set-Cookie header per cookie.
            value = cookie.output(header='')[1:]
            headers.add(SET_COOKIE, value)

        if self._compression:
            self._start_compression(request)

        if self._chunked:
            # Chunked transfer encoding exists only in HTTP/1.1.
            if request.version != HttpVersion11:
                raise RuntimeError("Using chunked encoding is forbidden "
                                   "for HTTP/{0.major}.{0.minor}".format(
                                       request.version))
            resp_impl.chunked = True
            if self._chunk_size:
                resp_impl.add_chunking_filter(self._chunk_size)
            headers[TRANSFER_ENCODING] = 'chunked'
        else:
            resp_impl.length = self.content_length

        headers.setdefault(DATE, request.time_service.strtime())
        headers.setdefault(SERVER, resp_impl.SERVER_SOFTWARE)
        if CONNECTION not in headers:
            # Only emit Connection when it differs from the version's
            # default: HTTP/1.0 defaults to close, HTTP/1.1 to keep-alive.
            if keep_alive:
                if version == HttpVersion10:
                    headers[CONNECTION] = 'keep-alive'
            else:
                if version == HttpVersion11:
                    headers[CONNECTION] = 'close'

        resp_impl.headers = headers

        self._send_headers(resp_impl)

        self._task = request._task
        return resp_impl

    def _send_headers(self, resp_impl):
        # Dirty hack required for
        # https://github.com/KeepSafe/aiohttp/issues/1093
        # File sender may override it
        resp_impl.send_headers()

    def write(self, data):
        """Write a chunk of payload; headers must already be sent.

        Returns the impl's write result (a drain awaitable) or an empty
        tuple for empty data.
        """
        assert isinstance(data, (bytes, bytearray, memoryview)), \
            "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._resp_impl is None:
            raise RuntimeError("Cannot call write() before start()")

        if data:
            return self._resp_impl.write(data)
        else:
            return ()

    @asyncio.coroutine
    def drain(self):
        """Flow control: wait until the transport's buffer drains."""
        if self._resp_impl is None:
            raise RuntimeError("Response has not been started")
        yield from self._resp_impl.transport.drain()

    @asyncio.coroutine
    def write_eof(self):
        """Finish the payload; safe to call more than once."""
        if self._eof_sent:
            return
        if self._resp_impl is None:
            raise RuntimeError("Response has not been started")

        yield from self._resp_impl.write_eof()
        self._eof_sent = True

    def __repr__(self):
        if self.started:
            info = "{} {} ".format(self._req.method, self._req.path)
        else:
            info = "not started"
        return "<{} {} {}>".format(self.__class__.__name__,
                                   self.reason, info)