def handle_one_request(self):
    """Serve exactly one request by replying with the canned response.

    Reads the request line and the header block; for POST requests also
    consumes the single-line body, then writes the server's canned
    response verbatim.
    """
    canned = self.server.test_case_server.canned_response
    request_line = self.rfile.readline()
    parse_headers(self.rfile)
    if request_line.startswith(b'POST'):
        # The body should be a single line (or we don't know where it
        # ends and we don't want to issue a blocking read).
        self.rfile.readline()
    self.wfile.write(canned)
def handle_response(self):
    """Perform the curl operation and handle the response.

    :return: The body of the response on success.
    :raises CouldNotConnect: if there is a connection error.
    :raises CertificateVerificationFailed: if the SSL certificate
        could not be verified.
    :raises HTTPError: if the response status is not 200.
    """
    try:
        self.curl.perform()
    except pycurl.error as e:
        code = e.args[0]
        if code in (pycurl.E_COULDNT_CONNECT, pycurl.E_COULDNT_RESOLVE_HOST):
            raise CouldNotConnect
        if code == pycurl.E_SSL_CACERT:
            raise CertificateVerificationFailed
        raise
    status = self.curl.getinfo(pycurl.HTTP_CODE)
    if status == 200:
        return self.result.getvalue().decode('utf-8')
    # Non-200: rebuild the response headers (minus the status line) so
    # they can travel on the HTTPError we raise.
    header_lines = self.response_header.getvalue().decode('utf-8').splitlines(True)
    header_ = ''.join(header_lines[1:])
    headers = parse_headers(BytesIO(header_.encode('ascii')))
    raise urllib.error.HTTPError(
        self.curl.getinfo(pycurl.EFFECTIVE_URL),
        code=status,
        msg=self.result.getvalue(),
        hdrs=headers,
        fp=StringIO())
def headers_factory(fp, *args):  # pylint:disable=unused-argument
    """Parse headers from *fp*, degrading gracefully on over-long lines.

    Returns an empty OldMessage whose ``status`` is set to
    ``'Line too long'`` when the header section cannot be parsed.
    """
    try:
        return client.parse_headers(fp, _class=OldMessage)
    except client.LineTooLong:
        message = OldMessage()
        message.status = 'Line too long'
        return message
def get(self, path, host=None, headers: dict = None):
    """Send an HTTP/1.0 GET for *path* and return ``(status, body)``.

    :param path: request path, e.g. ``'/index.html'``.
    :param host: overrides ``self._host`` for the ``Host`` header.
    :param headers: extra request headers; the caller's dict is NOT
        mutated (previously the ``Host`` entry was injected into the
        caller's own dict).
    :return: tuple of the integer status code and the (decompressed)
        response body bytes.
    """
    # Copy so we never mutate the caller's dict when injecting Host.
    headers = dict(headers) if headers else {}
    host = host or self._host
    if host:
        headers['Host'] = host
    headers_str = '\r\n'.join(f'{key}: {val}' for key, val in headers.items())
    http_query = f'GET {path} HTTP/1.0\r\n{headers_str}\r\n\r\n'
    self._stream.send(http_query.encode())
    raw_response = recv_all(self._stream)
    header, body = raw_response.split(b'\r\n\r\n', 1)
    f = BytesIO(header)
    # First line is the status line: protocol, status code, reason.
    status_line = f.readline().split(b' ')
    protocol, status = status_line[:2]
    status = int(status)
    headers = parse_headers(f)
    # email.Message lookup returns None when the header is absent, so
    # neither branch fires for an unencoded body.
    if headers['Content-Encoding'] == 'deflate':
        body = zlib.decompress(body)
    elif headers['Content-Encoding'] == 'gzip':
        body = gzip.decompress(body)
    if status != 200:
        logger.debug('raw_response = %s', raw_response)
    return status, body
def headers_factory(fp, *args):
    """Build a header message from *fp*; tolerate over-long header lines.

    On ``LineTooLong`` an empty OldMessage is returned with its
    ``status`` attribute set to ``'Line too long'``.
    """
    try:
        return client.parse_headers(fp, _class=OldMessage)
    except client.LineTooLong:
        fallback = OldMessage()
        fallback.status = 'Line too long'
        return fallback
def test_assert_header_parsing_no_error_on_multipart(self):
    """A multipart Content-Type header must parse without defects."""
    from http import client
    raw = (
        b'Content-Type: multipart/encrypted;protocol="application/'
        b'HTTP-SPNEGO-session-encrypted";boundary="Encrypted Boundary"'
        b"\nServer: Microsoft-HTTPAPI/2.0\nDate: Fri, 16 Aug 2019 19:28:01 GMT"
        b"\nContent-Length: 1895\n\n\n"
    )
    header_msg = io.BytesIO(raw)
    assert_header_parsing(client.parse_headers(header_msg))
def read_range_definition(self):
    """Read a new range definition in a multi parts message.

    Parse the headers including the empty line following them so that
    we are ready to read the data itself.

    :raises errors.InvalidHttpResponse: when the part carries no
        Content-Range header.
    """
    self._headers = http_client.parse_headers(self._file)
    # Extract the range definition.
    content_range = self._headers.get('content-range')
    if content_range is None:
        raise errors.InvalidHttpResponse(
            self._path,
            'Content-Range header missing in a multi-part response')
    self.set_range_from_header(content_range)
def _setup(self):
    """Read and validate the request line, then parse the headers.

    Populates ``self.command``, ``self.path``, ``self.request_version``
    and ``self.headers``.

    :raises HandlerException: on an over-long URI, a missing or
        malformed request line, or an unsupported HTTP version.
    """
    super().setup()
    self.raw_requestline = self.rfile.readline(65537)
    if len(self.raw_requestline) > 65536:
        raise HandlerException(HTTPStatus.REQUEST_URI_TOO_LONG)
    if not self.raw_requestline:
        raise HandlerException(HTTPStatus.BAD_REQUEST, "No request line")
    words = str(self.raw_requestline, 'iso-8859-1').rstrip('\r\n').split()
    if len(words) != 3:
        # Previously raised a bare Exception; use HandlerException like
        # every other parse failure here so callers can map it to a
        # proper HTTP status.
        raise HandlerException(HTTPStatus.BAD_REQUEST,
                               f'Invalid request: {words}')
    self.command, self.path, version = words
    if not version.startswith('HTTP/'):
        raise HandlerException(HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, version)
    self.request_version = version.split('/', 1)[1]
    self.headers = client.parse_headers(self.rfile, _class=self.MessageClass)
def _handle_request(self, request: bytes) -> None:
    """Dispatch one raw HTTP request to the configured API handler.

    :param request: the full request bytes — request line and headers
        separated from the body by a blank line.
    """
    raw_headers, body = request.split(b'\r\n\r\n', 1)
    header_fp = io.BytesIO(raw_headers)
    header_fp.readline()  # skip the request line; parse_headers wants headers only
    headers = dict(parse_headers(header_fp).items())
    method, path, protocol = request.split(b'\r\n', 1)[0].decode(ENCODING).lower().split(' ')
    # Typo fixed ('Recieved' -> 'Received'); lazy %-style args keep the
    # formatting off the hot path when INFO is disabled.
    logger.info('Message Received[%s:%s]', method, path)
    if method != self._api.method.value:
        self._invalid_method(method, self._api.method.value)
        return
    full_path: str = f'/{self._stage}{self._api.route.route}'
    if path == full_path:
        self._begin_response(method, path, protocol, headers, body)
    else:
        self._invalid_path(path, self._api.route.route)
async def read_stdout(stream):
    """Read a CGI-style response from *stream* and convert it into an
    aiohttp response.

    NOTE(review): ``p`` (the backend subprocess), ``request`` and
    ``GIT_BACKEND_CHUNK_SIZE`` are free variables from the enclosing
    scope — this coroutine only works as a nested helper. Presumably
    *stream* is ``p.stdout`` itself (headers and body are read from the
    same pipe) — confirm against the caller.
    """
    # Accumulate the header block up to the blank line.
    b = BytesIO()
    line = await stream.readline()
    while line != b'\r\n':
        b.write(line)
        line = await stream.readline()
    b.seek(0)
    headers = parse_headers(b)
    # CGI backends report the status in a 'Status' pseudo-header,
    # e.g. "200 OK"; strip it before forwarding the headers.
    status = headers.get("Status")
    if status:
        del headers["Status"]
        (status_code, status_reason) = status.split(" ", 1)
        status_code = int(status_code)
        status_reason = status_reason  # NOTE(review): redundant self-assignment
    else:
        status_code = 200
        status_reason = "OK"
    if 'Content-Length' in headers:
        # Fixed-length body: read exactly that many bytes and reply whole.
        content_length = int(headers['Content-Length'])
        return web.Response(headers=headers, status=status_code,
                            reason=status_reason,
                            body=await p.stdout.read(content_length))
    else:
        # Unknown length: stream the body in chunks.
        response = web.StreamResponse(
            headers=headers,
            status=status_code,
            reason=status_reason,
        )
        if tuple(request.version) == (1, 1):
            # Chunked transfer encoding is only valid for HTTP/1.1.
            response.enable_chunked_encoding()
        await response.prepare(request)
        chunk = await p.stdout.read(GIT_BACKEND_CHUNK_SIZE)
        while chunk:
            await response.write(chunk)
            chunk = await p.stdout.read(GIT_BACKEND_CHUNK_SIZE)
        await response.write_eof()
        return response
def parse_request(self):
    """Parse the WebSocket handshake request headers.

    When the Connection header requests an upgrade, the
    Sec-WebSocket-Key header is stored and the connection is kept open;
    otherwise the connection is marked for closing.

    :return: always True.
    """
    self.sec_websocket_key = None
    self.close_connection = True
    self.headers = client.parse_headers(self.rfile)
    connection = self.headers.get('Connection', "").lower()
    if connection == 'closing':
        self.close_connection = True
    elif connection == 'upgrade':
        self.close_connection = False
        self.sec_websocket_key = self.headers.get('Sec-WebSocket-Key', "")
    return True
def build_response(req_url, result_path, optparams=None):
    """Build a fake urllib response (or raise HTTPError) for tests.

    :param req_url: URL the response pretends to come from.
    :param result_path: file whose contents become the body; when falsy,
        ``optparams['data']`` is used instead.
    :param optparams: per-test overrides (status, headers, data, noexc);
        the caller's dict is copied, not mutated.
    :raises urequest.HTTPError: for 5xx and 404 statuses unless
        ``noexc`` is set.
    """
    # Default response parameters.
    params = {
        'status': 200,
        'status_msg': 'OK',
        'headers': {
            'Content-Type': 'text/html; charset=utf-8',
            'Set-Cookie': [
                'TABUNSESSIONID=abcdef9876543210abcdef9876543210; path=/',
            ],
        },
        'url': req_url,
    }
    # Parameters supplied by the calling test.
    if optparams:
        optparams = optparams.copy()
        params['headers'].update(optparams.pop('headers', {}))
        params.update(optparams)
    # The body of the faked HTTP response.
    fp = BytesIO(load_file(result_path) if result_path else params.get('data', b''))
    # Assemble the raw HTTP headers; list-valued headers repeat the name.
    lines = []
    for header, value in params['headers'].items():
        if isinstance(value, (tuple, list)):
            lines.append(header + ': ' + ('\r\n' + header + ': ').join(value))
        else:
            lines.append(header + ': ' + value)
    raw_headers = '\r\n'.join(lines) + '\r\n'
    headers = parse_headers(BytesIO(raw_headers.encode('utf-8')))
    # Some statuses must surface as an exception.
    if (params['status'] >= 500 or params['status'] in (404,)) and not params.get('noexc'):
        raise urequest.HTTPError(params['url'], params['status'],
                                 params['status_msg'], headers, fp)
    # Assemble the successful response object.
    resp = urequest.addinfourl(fp, headers, params['url'])
    resp.code = params['status']
    resp.msg = params['status_msg']
    return resp
def do_CONNECT(self):
    """Handle an HTTPS CONNECT: acknowledge the tunnel, wrap the client
    socket in TLS with a per-host certificate, then replay the inner
    request through do_request.
    """
    self.wfile.write(b"HTTP/1.1 200 Connection Established\r\nProxy-Agent:"
                     b" Script Proxy 0.1\r\n\r\n")
    host, port = self.path.split(":")
    port = int(port)
    # ssl.wrap_socket() was deprecated in 3.7 and removed in 3.12;
    # use an explicit server-side SSLContext instead.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(get_cert(host))
    sock = context.wrap_socket(self.request, server_side=True)
    rfile = sock.makefile('rb')
    wfile = sock.makefile('wb')
    raw_first_line = rfile.readline()
    # Strip the trailing CRLF from the request line.
    first_line = raw_first_line.decode("utf-8")[:-2]
    command, path, version = first_line.split(" ", 2)
    url = "https://" + host + ("" if port == 443 else ":" + str(port)) + path
    self.do_request(command, url, version, parse_headers(rfile), rfile, wfile)
def make_handshake(request):
    """Validate a client WebSocket handshake and build the 101 reply.

    :param request: binary file-like positioned at the request line.
    :return: the complete handshake response as a str.
    :raises BadRequestException: when any handshake requirement fails.
    """
    request_line = request.readline()
    if not request_line.startswith(b'GET'):
        raise BadRequestException("The method should be GET")
    headers = httplib.parse_headers(request)
    # The Upgrade header must name the websocket protocol.
    if headers.get('upgrade', '').lower().strip() != 'websocket':
        raise BadRequestException('No WebSocket UPGRADE hdr: {}'.format(
            headers.get('upgrade')))
    if 'upgrade' not in headers.get('connection', '').lower():
        raise BadRequestException(
            'No CONNECTION upgrade hdr: {}'.format(
                headers.get('connection')))
    # check supported version
    version = headers.get('sec-websocket-version')
    if version not in ('13', '8'):
        raise BadRequestException(
            'Unsupported version: {}'.format(version))
    # check client handshake for validity (key must decode to 16 bytes)
    key = headers.get('sec-websocket-key')
    try:
        if not key or len(base64.b64decode(key)) != 16:
            raise BadRequestException(
                'Handshake error: {!r}'.format(key))
    except binascii.Error:
        raise BadRequestException(
            'Handshake error: {!r}'.format(key))
    accept_key = hashlib.sha1(key.encode() + WS_KEY).digest()
    return ("HTTP/1.1 101 Switching Protocols\r\n"
            "UPGRADE: websocket\r\n"
            "CONNECTION: upgrade\r\n"
            "TRANSFER-ENCODING: chunked\r\n"
            "SEC-WEBSOCKET-ACCEPT: %s\r\n\r\n" %
            base64.b64encode(accept_key).decode())
def test_get_cookies_dict():
    """get_cookies_dict must expose Set-Cookie pairs as a text->text dict."""
    from io import BytesIO
    if PY2:
        from httplib import HTTPMessage
    else:
        from http.client import parse_headers
    fp = BytesIO(
        b'Set-Cookie: a=b; path=/\r\n'
        b'Set-Cookie: c=d; path=/\r\n'
        b'\r\n'
    )
    msg = HTTPMessage(fp) if PY2 else parse_headers(fp)
    cookies = utils.get_cookies_dict(msg)
    assert cookies == {'a': 'b', 'c': 'd'}
    for key, value in cookies.items():
        assert isinstance(key, text)
        assert isinstance(value, text)
def __call__(self, request):
    """Forward *request* to the upstream server and return the parsed
    response, optionally tracing both heads when VERBOSE is set."""
    if settings.VERBOSE:
        print("===================")
        print(request.get_head().strip().decode("utf-8"))
        print("...")
    body = self.get_input(request)
    sock = self.connect(self.get_socket(), request)
    if request.protocol == "https":
        sock = self.ssl_wrap(sock)
    # Stream the request body upstream in fixed-size chunks.
    while True:
        chunk = body.read(settings.CHUNK_SIZE)
        if not chunk:
            break
        sock.sendall(chunk)
    content = sock.makefile("rb")
    status_line = content.readline().decode("iso-8859-1")
    parts = status_line.strip().split(" ", 2)
    if len(parts) == 2:
        # Status line without a reason phrase.
        http_version, status = parts
        message = ""
    else:
        http_version, status, message = parts
    headers = parse_headers(content)
    response = request.create_response(status, message, http_version,
                                       headers, content)
    if settings.VERBOSE:
        print(response.get_head().strip().decode("utf-8"))
        print("===================")
    return response
def _internal(self, method):
    """Proxy the current request to the local backend, merging in the
    headers recorded in 'headers.data', and relay the backend reply."""
    url = 'http://localhost:{}{}'.format(self.backend_port, self.path)
    headers = dict(self.headers)
    with open('headers.data', 'rb') as fp:
        # Throw away first line which is not a header
        request_line = fp.readline()
        headers.update(dict(parse_headers(fp)))
    content_len = int(self.headers.get('content-length', 0))
    body = self.rfile.read(content_len)
    self.log_message('REQUEST_LINE: {}'.format(
        request_line.decode('ascii').strip()))
    header_dump = '\n'.join('{}: {}'.format(k, v) for k, v in headers.items())
    self.log_message('HEADERS:\n{}'.format(header_dump))
    self.log_message('BODY:\n{}'.format(body.decode('ascii')))
    resp = requests.request(method, url, headers=headers, data=body)
    self.send_response(resp.status_code)
    for key, value in resp.headers.items():
        self.send_header(key, value)
    self.end_headers()
    self.wfile.write(resp.content)
def headers_factory(fp):
    """Parse headers from *fp* into an OldMessage instance."""
    return client.parse_headers(fp, _class=OldMessage)
def make_headers(fp):
    """Parse *fp* into a header message, handling the py2/py3 split."""
    if Compatibility.PY2:
        return HTTPMessage(fp)
    return parse_headers(fp)
def httpresponse_patched_begin(self):
    """ Re-implemented httplib begin function to not loop over
    "100 CONTINUE" status replies but to report it to higher level so
    it can be processed.

    NOTE(review): mirrors the stdlib ``HTTPResponse.begin`` except that
    1xx statuses are surfaced to the caller instead of being skipped,
    and ``self.length`` is forced to 0 for them.
    """
    if self.headers is not None:
        # we've already started reading the response
        return
    # read only one status even if we get a non-100 response
    version, status, reason = self._read_status()
    self.code = self.status = status
    self.reason = reason.strip()
    if version in ('HTTP/1.0', 'HTTP/0.9'):
        # Some servers might still return "0.9", treat it as 1.0 anyway
        self.version = 10
    elif version.startswith('HTTP/1.'):
        self.version = 11  # use HTTP/1.1 code for HTTP/1.x where x>=1
    else:
        raise UnknownProtocol(version)
    self.headers = self.msg = httplib.parse_headers(self.fp)
    if self.debuglevel > 0:
        for hdr in self.headers:
            print("header:", hdr, end=" ")
    # are we using the chunked-style of transfer encoding?
    tr_enc = self.headers.get('transfer-encoding')
    if tr_enc and tr_enc.lower() == "chunked":
        self.chunked = True
        self.chunk_left = None
    else:
        self.chunked = False
    # will the connection close at the end of the response?
    self.will_close = self._check_close()
    # do we have a Content-Length?
    # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
    self.length = None
    length = self.headers.get('content-length')
    if length and not self.chunked:
        try:
            self.length = int(length)
        except ValueError:
            self.length = None
        else:
            # try/else: only validate when int() succeeded.
            if self.length < 0:  # ignore nonsensical negative lengths
                self.length = None
    else:
        self.length = None
    # does the body have a fixed length? (of zero)
    if (status == NO_CONTENT or status == NOT_MODIFIED or
            100 <= status < 200 or  # 1xx codes
            self._method == 'HEAD'):
        self.length = 0
    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not self.will_close and
            not self.chunked and
            self.length is None):
        self.will_close = True
def _gae_urlfetch(appid, payload, getfast, method, realurl):
    """Fetch *realurl* through the GAE application *appid*.

    Retries until a usable GAE frontend answers, then either hands the
    raw (chunked/gzipped) response to a wrapping HTTPResponse or
    decompresses the custom packed header block and patches the
    response object in place.

    NOTE(review): relies on module-level helpers (``_get_request_params``,
    ``http_gws``/``http_nor``, ``GC``, ``test_ip_gae``, ``make_errinfo``,
    ``GzipSock``) not visible here — behavior claims below are limited
    to what this body shows.
    """
    request_params, http_util, connection_cache_key = _get_request_params(appid)
    if http_util is http_gws:
        request_headers = {
            'User-Agent': 'Mozilla/5.0',
            'Accept-Encoding': 'gzip',
            'Content-Length': str(len(payload))
        }
    else:
        # Disable the GAE chunked mechanism, which is incompatible
        # with the CDN.
        request_headers = {
            'User-Agent': '',
            'Content-Length': str(len(payload))
        }
    while True:
        response = http_util.request(request_params, payload, request_headers,
                                     connection_cache_key=connection_cache_key,
                                     getfast=getfast, realmethod=method,
                                     realurl=realurl)
        if response is None:
            return
        if response.status not in (200, 404):
            break
        if http_util is http_nor:
            break
        app_server = response.headers.get('Server')
        if app_server == 'Google Frontend':
            break
        if GC.GAE_ENABLEPROXY:
            logging.warning('GAE 前置代理 [%s:%d] 无法正常工作', *response.xip)
            continue
        if test_ip_gae(response.xip[0]):
            break
        logging.warning('发现并移除非 GAE IP:%s,Server:%s', response.xip[0], app_server)
    response.http_util = http_util
    response.connection_cache_key = connection_cache_key
    response.app_status = response.status
    if response.status != 200:
        return response
    # Decompress and parse the chunked & gzipped response.
    if 'Transfer-Encoding' in response.headers:
        responseg = HTTPResponse(GzipSock(response), method=method)
        responseg.begin()
        responseg.app_status = 200
        responseg.xip = response.xip
        responseg.sock = response.sock
        responseg.http_util = http_util
        responseg.connection_cache_key = connection_cache_key
        return responseg
    # Read the packed header block: 2-byte big-endian length prefix.
    data = response.read(2)
    if len(data) < 2:
        response.status = 502
        make_errinfo(response, 'connection aborted. too short leadtype data=%r' % data)
        return response
    headers_length, = struct.unpack('!h', data)
    data = response.read(headers_length)
    if len(data) < headers_length:
        response.status = 502
        make_errinfo(response, 'connection aborted. too short headers data=%r' % data)
        return response
    # Decompress and parse the packed header block.
    raw_response_line, headers_data = zlib.decompress(data, -zlib.MAX_WBITS).split(b'\r\n', 1)
    raw_response_line = str(raw_response_line, 'iso-8859-1')
    raw_response_list = raw_response_line.split(None, 2)
    raw_response_length = len(raw_response_list)
    if raw_response_length == 3:
        _, status, reason = raw_response_list
        response.reason = reason.strip()
    elif raw_response_length == 2:
        _, status = raw_response_list
        response.reason = ''
    else:
        # Unparseable inner status line: give up (implicitly None).
        return
    response.status = int(status)
    # Flag server-side error information appended after the headers.
    headers_data, app_msg = headers_data.split(b'\r\n\r\n')
    if app_msg:
        response.app_status = response.status
        response.reason = 'debug error'
        response.app_msg = app_msg
    response.headers = response.msg = parse_headers(BytesIO(headers_data))
    if response.app_status == 200:
        response._method = method
        if response.status in (204, 205, 304) or 100 <= response.status < 200:
            response.length = 0
        else:
            try:
                response.length = int(response.headers.get('Content-Length'))
            except:
                response.length = None
    return response
def read_request(self, raw_requestline):
    """Parse the request line and the headers from ``self.rfile``.

    :param raw_requestline: the already-read request line (bytes or str).
    :return: True on success; None (implicitly) after logging any parse
        error, which callers treat as "drop the connection".
    """
    if not isinstance(raw_requestline, string_types):
        raw_requestline = raw_requestline.decode('latin-1')
    self.requestline = raw_requestline.rstrip()
    words = self.requestline.split()
    if len(words) == 3:
        self.command, self.path, self.request_version = words
        if not self._check_http_version():
            self.log_error('Invalid http version: %r', raw_requestline)
            return
    elif len(words) == 2:
        # Two-token request line: only HTTP/0.9 "GET <path>" is legal.
        self.command, self.path = words
        if self.command != "GET":
            self.log_error('Expected GET method: %r', raw_requestline)
            return
        self.request_version = "HTTP/0.9"
        # QQQ I'm pretty sure we can drop support for HTTP/0.9
    else:
        self.log_error('Invalid HTTP method: %r', raw_requestline)
        return
    try:
        self.headers = parse_headers(self.rfile, _class=self.MessageClass)
    except LineTooLong:
        ex = sys.exc_info()[1]
        self.log_error('Line in headers too long: %r', ex.args[0])
        return
    if self.headers.defects:
        self.log_error('Headers defect:' + ' %r' * len(self.headers.defects),
                       *self.headers.defects)
        return
    if self.headers.get("transfer-encoding", "").lower() == "chunked":
        # Transfer-Encoding takes precedence over Content-Length.
        try:
            del self.headers["content-length"]
        except KeyError:
            pass
    content_length = self.headers.get("content-length")
    if content_length is not None:
        content_length = int(content_length)
        if content_length < 0:
            self.log_error('Invalid Content-Length: %r', content_length)
            return
        # A non-zero body on GET/HEAD is rejected.
        if content_length and self.command in ('GET', 'HEAD'):
            self.log_error('Unexpected Content-Length')
            return
    self.content_length = content_length
    # Keep-alive is the HTTP/1.1 default unless Connection: close.
    if self.request_version == "HTTP/1.1":
        conntype = self.headers.get("Connection", "").lower()
        if conntype == "close":
            self.close_connection = True
        else:
            self.close_connection = False
    else:
        self.close_connection = True
    return True
def get_headers_and_fp(self):
    """Parse the request captured on ``self.sock`` into headers.

    :return: ``(message, fp)`` where *fp* is positioned just past the
        header block, i.e. at the start of the body.
    """
    buf = io.BytesIO(self.sock.data)
    buf.readline()  # skip the request line; only headers are parsed
    return client.parse_headers(buf), buf
def make_headers(fp):
    """Return a parsed header message from *fp*, py2/py3 compatible."""
    parser = HTTPMessage if PY2 else parse_headers
    return parser(fp)
def create_response_info(fp):
    """Parse response headers from the binary file-like *fp*.

    :return: an ``http.client.HTTPMessage`` with the parsed headers.
    """
    return parse_headers(fp)
def gae_urlfetch(method, url, headers, payload, appid, getfast=None, **kwargs):
    """Tunnel an HTTP request to *url* through the GAE app *appid*.

    The real request line + headers are zlib-packed into the POST body
    (length-prefixed), sent to the GAE frontend, and the packed reply is
    unpacked back onto the response object.

    NOTE(review): depends on module-level helpers (``gae_options``,
    ``gae_params_dict``, ``qGAE``, ``http_gws``, ``GC``, ``test_ip_gae``,
    ``make_errinfo``, ``GzipSock``) not visible here. Also note *headers*
    is mutated (Host removed) — callers must not rely on it afterwards.
    """
    # GAE proxy requests must not carry a Host header.
    if 'Host' in headers:
        del headers['Host']
    metadata = '%s %s HTTP/1.1\r\n' % (method, url)
    metadata += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    metadata += gae_options
    if not isinstance(metadata, bytes):
        metadata = metadata.encode()
    # Raw deflate: strip the 2-byte zlib header and 4-byte checksum.
    metadata = zlib.compress(metadata)[2:-4]
    if payload:
        if not isinstance(payload, bytes):
            payload = payload.encode()
        payload = struct.pack('!h', len(metadata)) + metadata + payload
    else:
        payload = struct.pack('!h', len(metadata)) + metadata
    request_headers = {
        'User-Agent': 'Mozilla/5.0',
        'Accept-Encoding': 'gzip',
        'Content-Length': str(len(payload))
    }
    request_params = gae_params_dict[appid]
    realurl = 'GAE-' + url
    qGAE.get()  # get start from Queue
    while True:
        response = http_gws.request(request_params, payload, request_headers,
                                    connection_cache_key='google_gae|:443',
                                    getfast=getfast, realmethod=method,
                                    realurl=realurl)
        if response is None:
            return
        if response.status not in (200, 404):
            break
        app_server = response.headers.get('Server')
        if app_server == 'Google Frontend':
            break
        if GC.GAE_ENABLEPROXY:
            logging.warning('GAE 前置代理 [%s:%d] 无法正常工作', *response.xip)
            continue
        if test_ip_gae(response.xip[0]):
            break
        logging.warning('发现并移除非 GAE IP:%s,Server:%s', response.xip[0], app_server)
    response.app_status = response.status
    if response.status != 200:
        return response
    # Decompress and parse the chunked & gzipped response.
    if 'Transfer-Encoding' in response.headers:
        responseg = HTTPResponse(GzipSock(response), method=method)
        responseg.begin()
        responseg.app_status = 200
        responseg.xip = response.xip
        responseg.sock = response.sock
        return responseg
    # Read the packed header block: 2-byte big-endian length prefix.
    data = response.read(2)
    if len(data) < 2:
        response.status = 502
        make_errinfo(response, b'connection aborted. too short leadtype data=' + data)
        return response
    headers_length, = struct.unpack('!h', data)
    data = response.read(headers_length)
    if len(data) < headers_length:
        response.status = 502
        make_errinfo(response, b'connection aborted. too short headers data=' + data)
        return response
    # Decompress and parse the packed header block.
    raw_response_line, headers_data = zlib.decompress(data, -zlib.MAX_WBITS).split(b'\r\n', 1)
    raw_response_line = str(raw_response_line, 'iso-8859-1')
    raw_response_list = raw_response_line.split(None, 2)
    raw_response_length = len(raw_response_list)
    if raw_response_length == 3:
        _, status, reason = raw_response_list
        response.reason = reason.strip()
    elif raw_response_length == 2:
        _, status = raw_response_list
        response.reason = ''
    else:
        # Unparseable inner status line: give up (implicitly None).
        return
    response.status = int(status)
    # Flag server-side error information appended after the headers.
    headers_data, app_msg = headers_data.split(b'\r\n\r\n')
    if app_msg:
        response.app_status = response.status
        response.reason = 'debug error'
        response.app_msg = app_msg
    response.headers = response.msg = parse_headers(BytesIO(headers_data))
    if response.app_status == 200:
        response._method = method
        if response.status in (204, 205, 304) or 100 <= response.status < 200:
            response.length = 0
        else:
            try:
                response.length = int(response.headers.get('Content-Length'))
            except:
                response.length = None
    return response
def __init__(self, headers_fp):
    """Capture parsed headers from *headers_fp*.

    :param headers_fp: seekable binary file-like containing a raw
        header block; it is rewound before parsing.
    """
    headers_fp.seek(0)  # always parse from the start of the buffer
    self.msg = parse_headers(headers_fp)
def read_request(self, raw_requestline):
    """Parse one request line plus headers from ``self.rfile``.

    :param raw_requestline: the already-read request line (bytes or str).
    :return: True on success; None (implicitly) after logging a parse
        error — callers treat a falsy result as a bad request.
    """
    if not isinstance(raw_requestline, string_types):
        raw_requestline = raw_requestline.decode('latin-1')
    self.requestline = raw_requestline.rstrip()
    words = self.requestline.split()
    if len(words) == 3:
        self.command, self.path, self.request_version = words
        if not self._check_http_version():
            self.log_error('Invalid http version: %r', raw_requestline)
            return
    elif len(words) == 2:
        # Only HTTP/0.9 allows a version-less request line, GET only.
        self.command, self.path = words
        if self.command != "GET":
            self.log_error('Expected GET method: %r', raw_requestline)
            return
        self.request_version = "HTTP/0.9"
        # QQQ I'm pretty sure we can drop support for HTTP/0.9
    else:
        self.log_error('Invalid HTTP method: %r', raw_requestline)
        return
    try:
        self.headers = parse_headers(self.rfile, _class=self.MessageClass)
    except LineTooLong:
        ex = sys.exc_info()[1]
        self.log_error('Line in headers too long: %r', ex.args[0])
        return
    if self.headers.defects:
        self.log_error(
            'Headers defect:' + ' %r' * len(self.headers.defects),
            *self.headers.defects)
        return
    if self.headers.get("transfer-encoding", "").lower() == "chunked":
        # Chunked transfer overrides any declared Content-Length.
        try:
            del self.headers["content-length"]
        except KeyError:
            pass
    content_length = self.headers.get("content-length")
    if content_length is not None:
        content_length = int(content_length)
        if content_length < 0:
            self.log_error('Invalid Content-Length: %r', content_length)
            return
        # Reject a declared body on GET/HEAD requests.
        if content_length and self.command in ('GET', 'HEAD'):
            self.log_error('Unexpected Content-Length')
            return
    self.content_length = content_length
    # HTTP/1.1 defaults to keep-alive unless Connection: close is sent.
    if self.request_version == "HTTP/1.1":
        conntype = self.headers.get("Connection", "").lower()
        if conntype == "close":
            self.close_connection = True
        else:
            self.close_connection = False
    else:
        self.close_connection = True
    return True
def parse_request(self):
    """Parse the HTTP request line and headers.

    :return: True on success.
    :raises GetReqTimeout: when no request line could be read.
    :raises ParseReqFail: on a malformed or oversized request line or
        an unsupported HTTP version.
    :raises socket.error: when the first byte looks like a TLS
        handshake record (0x16) rather than plain HTTP.
    """
    try:
        self.raw_requestline = self.rfile.readline(65537)
    except:
        # Best-effort: any read failure is surfaced as a timeout.
        raise GetReqTimeout()
    if not self.raw_requestline:
        raise GetReqTimeout()
    if len(self.raw_requestline) > 65536:
        raise ParseReqFail("Recv command line too large")
    # BUGFIX: indexing bytes yields an int on Python 3, so the original
    # comparison raw_requestline[0] == '\x16' (a str) could never be
    # true; slice to compare bytes with bytes.
    if self.raw_requestline[:1] == b'\x16':
        raise socket.error
    self.command = b''  # set in case of error on the first line
    self.path = b''
    self.request_version = version = self.default_request_version
    requestline = self.raw_requestline.rstrip(b'\r\n')
    self.requestline = requestline
    words = requestline.split()
    if len(words) == 3:
        command, path, version = words
        if version[:5] != b'HTTP/':
            raise ParseReqFail("Req command format fail:%s" % requestline)
        try:
            base_version_number = version.split(b'/', 1)[1]
            version_number = base_version_number.split(b".")
            # RFC 2145 section 3.1 says there can be only one "." and
            #   - major and minor numbers MUST be treated as
            #     separate integers;
            #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
            #     turn is lower than HTTP/12.3;
            #   - Leading zeros MUST be ignored by recipients.
            if len(version_number) != 2:
                raise ParseReqFail("Req command format fail:%s" % requestline)
            version_number = int(version_number[0]), int(version_number[1])
        except (ValueError, IndexError):
            raise ParseReqFail("Req command format fail:%s" % requestline)
        if version_number >= (1, 1):
            self.close_connection = 0
        if version_number >= (2, 0):
            raise ParseReqFail("Req command format fail:%s" % requestline)
    elif len(words) == 2:
        command, path = words
        self.close_connection = 1
        if command != b'GET':
            raise ParseReqFail("Req command format HTTP/0.9 line:%s" % requestline)
    elif not words:
        raise ParseReqFail("Req command format fail:%s" % requestline)
    else:
        raise ParseReqFail("Req command format fail:%s" % requestline)
    self.command, self.path, self.request_version = command, path, version
    # Examine the headers and look for a Connection directive.
    headers = client.parse_headers(self.rfile, _class=self.MessageClass)
    self.headers = dict(map(utils.to_bytes, headers.items()))
    self.host = self.headers.get(b'Host', b"")
    conntype = self.headers.get(b'Connection', b"")
    if conntype.lower() == b'close':
        self.close_connection = 1
    elif conntype.lower() == b'keep-alive':
        self.close_connection = 0
    self.upgrade = self.headers.get(b'Upgrade', b"").lower()
    return True