def parse_headers(self, data):
    """Parse a raw header section into a list of ``(NAME, value)`` tuples.

    :param data: the header block as a single string, lines separated by
        CRLF (without the terminating blank line).
    :return: list of ``(name, value)`` tuples; names are upper-cased and
        folded continuation lines are joined into a single value.
    :raises LimitRequestHeaders: when more than
        ``self.limit_request_fields`` header fields are present.
    :raises InvalidHeader: when a line contains no ``:`` separator.
    :raises InvalidHeaderName: when the name matches ``self.hdrre``.
    """
    headers = []

    # Split lines on \r\n keeping the \r\n on each line
    lines = [line + "\r\n" for line in data.split("\r\n")]

    # Parse headers into key/value pairs paying attention
    # to continuation lines.
    while lines:
        # Use ">=" (not ">") so at most limit_request_fields headers are
        # ever accepted; the old ">" comparison let one extra field slip
        # through before raising.
        if len(headers) >= self.limit_request_fields:
            raise LimitRequestHeaders("limit request headers fields")

        # Parse initial header name : value pair.
        curr = lines.pop(0)
        if curr.find(":") < 0:
            raise InvalidHeader(curr.strip())
        name, value = curr.split(":", 1)
        name = name.rstrip(" \t").upper()
        if self.hdrre.search(name):
            raise InvalidHeaderName(name)
        name, value = name.strip(), [value.lstrip()]

        # Consume value continuation lines (obs-fold: leading SP/HTAB).
        while lines and lines[0].startswith((" ", "\t")):
            value.append(lines.pop(0))
        value = ''.join(value).rstrip()

        headers.append((name, value))
    return headers
def parse(self, unreader):
    """Parse the request line and header section from the client stream.

    :param unreader: source of raw request data; consumed through
        ``self.get_data`` and pushed back with ``self.unreader.unread``.
    :return: any body data already read past the end of the header
        block, or ``""`` when a bare CRLF terminates the request early.
    :raises LimitRequestLine: request line exceeds
        ``self.limit_request_line``.
    :raises LimitRequestHeaders: buffered header data exceeds
        ``self.max_buffer_headers``.
    """
    buf = StringIO()
    self.get_data(unreader, buf, stop=True)

    # Request line
    data = buf.getvalue()
    while True:
        idx = data.find("\r\n")
        if idx >= 0:
            # check if the request line is too large
            if idx > self.limit_request_line > 0:
                raise LimitRequestLine(idx, self.limit_request_line)
            break

        # check if the chunk is too large before reading the next chunk
        # (chained comparison == "exceeds limit AND limit is enabled")
        if len(data) - 2 > self.limit_request_line > 0:
            raise LimitRequestLine(len(data), self.limit_request_line)
        self.get_data(unreader, buf)
        data = buf.getvalue()

    self.parse_request_line(data[:idx])
    buf = StringIO()
    buf.write(data[idx + 2:])  # Skip \r\n

    # Headers: idx/done are (re)computed at the top of the loop, so the
    # old duplicate pre-loop computation was dead code and is removed.
    data = buf.getvalue()
    while True:
        idx = data.find("\r\n\r\n")
        done = data[:2] == "\r\n"
        if idx < 0 and not done:
            self.get_data(unreader, buf)
            data = buf.getvalue()
            if len(data) > self.max_buffer_headers:
                raise LimitRequestHeaders("max buffer headers")
        else:
            break

    if done:
        # Bare CRLF before any header: push back the remainder and
        # signal an empty request.
        self.unreader.unread(data[2:])
        return ""

    self.headers = self.parse_headers(data[:idx])

    ret = data[idx + 4:]
    buf = None  # release the buffered data promptly
    return ret
def parse_headers(self, data):
    """Turn a raw CRLF-separated header block into (NAME, value) pairs.

    Enforces ``self.limit_request_fields`` on the number of fields and
    ``self.limit_request_field_size`` on each (folded) field's size.
    """
    headers = []

    # Decode each line and restore the CRLF terminator so that the
    # per-field size accounting below counts the line ending too.
    lines = [bytes_to_str(raw) + "\r\n" for raw in data.split(b"\r\n")]

    pos = 0
    count = len(lines)
    while pos < count:
        # Stop as soon as the configured field-count ceiling is reached.
        if len(headers) >= self.limit_request_fields:
            raise LimitRequestHeaders("limit request headers fields")

        curr = lines[pos]
        pos += 1
        header_length = len(curr)

        # Every header line must contain a colon separator.
        if curr.find(":") < 0:
            raise InvalidHeader(curr.strip())

        name, value = curr.split(":", 1)
        name = name.rstrip(" \t").upper()
        if HEADER_RE.search(name):
            raise InvalidHeaderName(name)
        name = name.strip()
        value = [value.lstrip()]

        # Fold obs-fold continuation lines (leading SP/HTAB) into the
        # current value, tracking the accumulated field size as we go.
        while pos < count and lines[pos].startswith((" ", "\t")):
            curr = lines[pos]
            pos += 1
            header_length += len(curr)
            if header_length > self.limit_request_field_size > 0:
                raise LimitRequestHeaders("limit request headers "
                                          "fields size")
            value.append(curr)
        value = ''.join(value).rstrip()

        if header_length > self.limit_request_field_size > 0:
            raise LimitRequestHeaders("limit request headers fields size")

        headers.append((name, value))
    return headers
def parse(self, unreader):
    """Parse one request: optional proxy-protocol line, request line,
    and header section.

    :param unreader: raw data source; read through ``self.read_line`` /
        ``self.get_data`` and pushed back with ``self.unreader.unread``.
    :return: any body bytes already read past the header block, or
        ``b""`` when the request starts with a bare CRLF.
    :raises LimitRequestHeaders: buffered header data exceeds
        ``self.max_buffer_headers``.
    """
    buf = io.BytesIO()
    self.get_data(unreader, buf, stop=True)

    # Request line; rbuf holds any surplus data read past the CRLF.
    line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

    # When a PROXY protocol preamble is present, the real request line
    # is the next line on the wire.
    if self.proxy_protocol(bytes_to_str(line)):
        # get next request line
        buf = io.BytesIO()
        buf.write(rbuf)
        line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

    # Handle the request line, e.g. b'GET /test-db/1 HTTP/1.1'
    self.parse_request_line(line)
    buf = io.BytesIO()
    buf.write(rbuf)

    # `data` may hold both the header section and the start of the
    # body, e.g. b'Host: localhost:8000\r\nAccept: */*\r\n\r\na=1&b=2'.
    # idx/done are recomputed at the top of each iteration, so the old
    # duplicate pre-loop computation was dead code and is removed.
    data = buf.getvalue()
    while True:
        idx = data.find(b"\r\n\r\n")
        # A leading bare CRLF means an empty header section.
        done = data[:2] == b"\r\n"
        if idx < 0 and not done:
            self.get_data(unreader, buf)
            data = buf.getvalue()
            if len(data) > self.max_buffer_headers:
                raise LimitRequestHeaders("max buffer headers")
        else:
            # idx >= 0 or done
            break

    if done:
        self.unreader.unread(data[2:])
        return b""

    # Header section (everything before the blank line).
    self.headers = self.parse_headers(data[:idx])

    # Body bytes already buffered past the blank line.
    ret = data[idx + 4:]

    buf = None
    return ret
def parse(self, unreader):
    """Parse one request: optional proxy-protocol line, request line,
    and header section.

    :param unreader: raw data source; read through ``self.read_line`` /
        ``self.get_data`` and pushed back with ``self.unreader.unread``.
    :return: any body bytes already read past the header block, or
        ``b""`` when the request starts with a bare CRLF.
    :raises LimitRequestHeaders: buffered header data exceeds
        ``self.max_buffer_headers``.
    """
    buf = BytesIO()
    self.get_data(unreader, buf, stop=True)

    # get request line
    line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

    # proxy protocol: if a PROXY preamble was consumed, the real
    # request line is the next line on the wire
    if self.proxy_protocol(bytes_to_str(line)):
        # get next request line
        buf = BytesIO()
        buf.write(rbuf)
        line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

    self.parse_request_line(line)
    buf = BytesIO()
    buf.write(rbuf)

    # Headers: idx/done are recomputed at the top of each iteration, so
    # the old duplicate pre-loop computation was dead code and is removed.
    data = buf.getvalue()
    while True:
        idx = data.find(b"\r\n\r\n")
        # A leading bare CRLF means an empty header section.
        done = data[:2] == b"\r\n"
        if idx < 0 and not done:
            self.get_data(unreader, buf)
            data = buf.getvalue()
            if len(data) > self.max_buffer_headers:
                raise LimitRequestHeaders("max buffer headers")
        else:
            break

    if done:
        self.unreader.unread(data[2:])
        return b""

    self.headers = self.parse_headers(data[:idx])

    ret = data[idx + 4:]
    buf = None
    return ret
def parse_headers(self, data):
    """Parse the raw header block into ``(NAME, value)`` pairs, enforcing
    field-count/size limits and validating any configured secure-scheme
    headers against the peer address.

    :param data: header section as bytes, CRLF-separated, without the
        terminating blank line.
    :return: list of ``(name, value)`` tuples with upper-cased names.
    :raises LimitRequestHeaders: too many fields, or a (folded) field
        larger than ``self.limit_request_field_size``.
    :raises InvalidHeader: line without a ``:`` separator.
    :raises InvalidHeaderName: name matching ``HEADER_RE``.
    :raises InvalidSchemeHeaders: two scheme headers that disagree.
    """
    cfg = self.cfg
    headers = []

    # Split lines on \r\n keeping the \r\n on each line
    lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")]

    # handle scheme headers: only honor them when the peer is trusted —
    # wildcard forwarded_allow_ips, a listed remote IP, or a local
    # AF_UNIX socket.
    scheme_header = False
    secure_scheme_headers = {}
    if '*' in cfg.forwarded_allow_ips:
        secure_scheme_headers = cfg.secure_scheme_headers
    elif isinstance(self.unreader, SocketUnreader):
        remote_addr = self.unreader.sock.getpeername()
        if self.unreader.sock.family in (socket.AF_INET, socket.AF_INET6):
            remote_host = remote_addr[0]
            if remote_host in cfg.forwarded_allow_ips:
                secure_scheme_headers = cfg.secure_scheme_headers
        elif self.unreader.sock.family == socket.AF_UNIX:
            secure_scheme_headers = cfg.secure_scheme_headers

    # Parse headers into key/value pairs paying attention
    # to continuation lines.
    while lines:
        if len(headers) >= self.limit_request_fields:
            raise LimitRequestHeaders("limit request headers fields")

        # Parse initial header name : value pair.
        curr = lines.pop(0)
        header_length = len(curr)
        if curr.find(":") < 0:
            raise InvalidHeader(curr.strip())
        name, value = curr.split(":", 1)
        if self.cfg.strip_header_spaces:
            # tolerate whitespace before the colon (lenient mode)
            name = name.rstrip(" \t").upper()
        else:
            name = name.upper()
        if HEADER_RE.search(name):
            raise InvalidHeaderName(name)

        name, value = name.strip(), [value.lstrip()]

        # Consume value continuation lines (leading SP/HTAB), enforcing
        # the per-field size limit as the folded value grows.
        while lines and lines[0].startswith((" ", "\t")):
            curr = lines.pop(0)
            header_length += len(curr)
            if header_length > self.limit_request_field_size > 0:
                raise LimitRequestHeaders("limit request headers " +
                                          "fields size")
            value.append(curr)
        value = ''.join(value).rstrip()

        # Re-check after joining in case no continuation line tripped it.
        if header_length > self.limit_request_field_size > 0:
            raise LimitRequestHeaders("limit request headers fields size")

        if name in secure_scheme_headers:
            # Header marks the request as https only when its value
            # matches the configured expected value exactly.
            secure = value == secure_scheme_headers[name]
            scheme = "https" if secure else "http"
            if scheme_header:
                # A second scheme header must agree with the first.
                if scheme != self.scheme:
                    raise InvalidSchemeHeaders()
            else:
                scheme_header = True
                self.scheme = scheme

        headers.append((name, value))
    return headers