def _parse_headers(self, data):
    idx = data.find(b("\r\n\r\n"))
    if idx < 0:  # we don't have all headers
        return False

    # Split lines on \r\n keeping the \r\n on each line
    lines = [bytes_to_str(line) + "\r\n"
             for line in data[:idx].split(b("\r\n"))]

    # Parse headers into key/value pairs paying attention
    # to continuation lines.
    while len(lines):
        # Parse initial header name : value pair.
        curr = lines.pop(0)
        if curr.find(":") < 0:
            raise InvalidHeader("invalid line %s" % curr.strip())

        name, value = curr.split(":", 1)
        name = name.rstrip(" \t").upper()
        if HEADER_RE.search(name):
            raise InvalidHeader("invalid header name %s" % name)

        name, value = name.strip(), [value.lstrip()]

        # Consume value continuation lines
        while len(lines) and lines[0].startswith((" ", "\t")):
            value.append(lines.pop(0))
        value = ''.join(value).rstrip()

        # store new header value
        self._headers.add_header(name, value)

        # update WSGI environ
        key = 'HTTP_%s' % name.upper().replace('-', '_')
        self._environ[key] = value

    # detect now if body is sent by chunks.
    clen = self._headers.get('content-length')
    te = self._headers.get('transfer-encoding', '').lower()

    if clen is not None:
        try:
            self._clen_rest = self._clen = int(clen)
        except ValueError:
            pass
    else:
        self._chunked = (te == 'chunked')
        if not self._chunked:
            self._clen_rest = MAXSIZE

    # detect encoding and set decompress object
    encoding = self._headers.get('content-encoding')
    if self.decompress:
        if encoding == "gzip":
            self.__decompress_obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        elif encoding == "deflate":
            self.__decompress_obj = zlib.decompressobj()

    rest = data[idx + 4:]
    self._buf = [rest]
    self.__on_headers_complete = True
    return len(rest)
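# Standalone sketch of the continuation-line handling used above (illustrative
# only: HttpParser, bytes_to_str and HEADER_RE are not reproduced here, and the
# dict-based loop below is written just for this example). A header value
# folded across lines that start with SP or TAB is rejoined into one value.
raw = b"X-Long: first part\r\n\tsecond part\r\nHost: example.com\r\n\r\n"
idx = raw.find(b"\r\n\r\n")
pending = [line.decode("latin-1") + "\r\n" for line in raw[:idx].split(b"\r\n")]

headers = {}
while pending:
    curr = pending.pop(0)
    name, value = curr.split(":", 1)
    parts = [value.lstrip()]
    # a line starting with SP or TAB continues the previous header value
    while pending and pending[0].startswith((" ", "\t")):
        parts.append(pending.pop(0))
    headers[name.strip().lower()] = "".join(parts).rstrip()

print(repr(headers["x-long"]))   # 'first part\r\n\tsecond part'
print(headers["host"])           # example.com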
def load_event(s):
    """Rebuild an Event from its JSON representation; returns (event, id)."""
    data = json.loads(s)
    name = bytes_to_str(data["name"].encode("utf-8"))

    # re-encode text arguments so they arrive as bytes
    args = []
    for arg in data["args"]:
        if isinstance(arg, text_type):
            arg = arg.encode("utf-8")
        args.append(arg)

    kwargs = {}
    for k, v in data["kwargs"].items():
        if isinstance(v, text_type):
            v = v.encode("utf-8")
        kwargs[str(k)] = v

    e = Event.create(name, *args, **kwargs)
    e.success = bool(data["success"])
    e.failure = bool(data["failure"])
    e.notify = bool(data["notify"])
    e.channels = tuple(data["channels"])

    # restore any extra metadata as plain attributes
    for k, v in dict(data["meta"]).items():
        setattr(e, k, v)

    return e, data["id"]
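# Example of the payload shape load_event expects, inferred from the keys it
# reads above (the concrete values are made up; actually calling load_event
# also needs the module's Event, bytes_to_str and text_type in scope).
import json

payload = json.dumps({
    "id": 1,
    "name": "hello",
    "args": ["world"],
    "kwargs": {"retries": 3},
    "success": True,
    "failure": False,
    "notify": False,
    "channels": ["node"],
    "meta": {"source": "peer-1"},
})
# event, event_id = load_event(payload)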
def decode(self, data):
    """Decode a JSON-serialized event back into an Event instance."""
    obj = json.loads(data)
    name = bytes_to_str(obj["name"].encode("utf-8"))

    # re-encode text arguments so they arrive as bytes
    args = []
    for arg in obj["args"]:
        if isinstance(arg, text_type):
            arg = arg.encode("utf-8")
        args.append(arg)

    kwargs = {}
    for k, v in obj["kwargs"].items():
        if isinstance(v, text_type):
            v = v.encode("utf-8")
        kwargs[str(k)] = v

    e = Event.create(name, *args, **kwargs)
    e.success = bool(obj["success"])
    e.failure = bool(obj["failure"])
    e.notify = bool(obj["notify"])
    e.channels = tuple(obj["channels"])
    return e
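# Sketch of the text -> bytes normalization both loaders rely on; _to_bytes is
# a hypothetical helper written only for illustration (text_type is str on
# Python 3, so plain str is used here).
def _to_bytes(value):
    # encode text values so positional and keyword arguments arrive as bytes
    if isinstance(value, str):
        return value.encode("utf-8")
    return value

print(_to_bytes("ping"))   # b'ping'
print(_to_bytes(42))       # 42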
def execute(self, data, length):
    # end of body can be passed manually by putting a length of 0
    if length == 0:
        # mark the body as finished (same private flag checked below)
        self.__on_message_complete = True
        return length

    # start to parse
    nb_parsed = 0
    while True:
        if not self.__on_firstline:
            idx = data.find(b("\r\n"))
            if idx < 0:
                self._buf.append(data)
                return len(data)
            else:
                self.__on_firstline = True
                self._buf.append(data[:idx])
                first_line = bytes_to_str(b("").join(self._buf))
                nb_parsed = nb_parsed + idx + 2

                rest = data[idx + 2:]
                data = b("")
                if self._parse_firstline(first_line):
                    self._buf = [rest]
                else:
                    return nb_parsed
        elif not self.__on_headers_complete:
            if data:
                self._buf.append(data)
                data = b("")

            try:
                to_parse = b("").join(self._buf)
                ret = self._parse_headers(to_parse)
                if not ret:
                    return length
                nb_parsed = nb_parsed + (len(to_parse) - ret)
            except InvalidHeader as e:
                self.errno = INVALID_HEADER
                self.errstr = str(e)
                return nb_parsed
        elif not self.__on_message_complete:
            if not self.__on_message_begin:
                self.__on_message_begin = True

            if data:
                self._buf.append(data)
                data = b("")

            ret = self._parse_body()
            if ret is None:
                return length
            elif ret < 0:
                return ret
            elif ret == 0:
                self.__on_message_complete = True
                return length
            else:
                nb_parsed = max(length, ret)
        else:
            return 0
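# Hedged usage sketch: execute() above matches the pure-Python parser shipped
# as http_parser.pyparser.HttpParser, so the import and method names below
# assume that package is installed. The parser is fed in arbitrary chunks and
# reports how many bytes it consumed at each step.
from http_parser.pyparser import HttpParser

p = HttpParser()
request = b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"

for chunk in (request[:10], request[10:]):     # any slicing works
    consumed = p.execute(chunk, len(chunk))
    assert consumed == len(chunk)

print(p.is_headers_complete())           # True once "\r\n\r\n" was parsed
print(p.get_headers().get("host"))       # header lookup is case-insensitive,
                                         # as _parse_headers relies on above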