Example #1
0
File: http.py  Project: yyasuda/scapy
 def post_dissect(self, s):
     """Post-process the dissected HTTP body *s*.

     First reassembles a ``chunked`` transfer-encoded body, then tries to
     undo any content encoding advertised by the headers (deflate, gzip,
     compress, br, zstd).  Returns the body unchanged when the
     ``auto_compression`` switch in ``conf.contribs["http"]`` is off, and
     best-effort otherwise: decompression failures (e.g. truncated
     captures) leave the body as-is rather than raising.
     """
     if not conf.contribs["http"]["auto_compression"]:
         return s
     encodings = self._get_encodings()
     # Reassemble "chunked" framing: each chunk is <hex length>\r\n<data>\r\n
     if "chunked" in encodings:
         assembled = b""
         while s:
             size_field, _, remainder = s.partition(b"\r\n")
             try:
                 chunk_size = int(size_field, 16)
             except ValueError:
                 # Not a valid chunk. Ignore
                 break
             chunk = remainder[:chunk_size]
             if remainder[chunk_size:chunk_size + 2] != b"\r\n":
                 # Invalid chunk. Ignore
                 break
             s = remainder[chunk_size + 2:]
             assembled += chunk
         # Only adopt the reassembled body if every byte was consumed;
         # otherwise keep the original (partially chunked) payload.
         if not s:
             s = assembled
     # Undo the advertised content encoding, best-effort.
     try:
         if "deflate" in encodings:
             import zlib
             s = zlib.decompress(s)
         elif "gzip" in encodings:
             s = gzip_decompress(s)
         elif "compress" in encodings:
             import lzw
             s = lzw.decompress(s)
         elif "br" in encodings:
             if not _is_brotli_available:
                 log_loading.info(
                     "Can't import brotli. brotli decompression "
                     "will be ignored !")
             else:
                 s = brotli.decompress(s)
         elif "zstd" in encodings:
             if not _is_zstd_available:
                 log_loading.info(
                     "Can't import zstandard. zstd decompression "
                     "will be ignored !")
             else:
                 # Streaming API: the simple one-shot API only works when
                 # the frame embeds its content size, which captures may
                 # not include.
                 buf = io.BytesIO(s)
                 stream = zstandard.ZstdDecompressor().stream_reader(buf)
                 s = stream.read()
     except Exception:
         # Decompression failed — most likely an incomplete capture;
         # fall through with whatever body we have.
         pass
     return s
Example #2
0
 def post_dissect(self, s):
     """De-chunk and decompress the HTTP body *s* after dissection.

     Honours the ``auto_compression`` flag in ``conf.contribs["http"]``;
     when disabled the body is returned untouched.  Decompression is
     best-effort: any failure (typically incomplete captured data) leaves
     the body in its current state.
     """
     if not conf.contribs["http"]["auto_compression"]:
         return s
     encodings = self._get_encodings()
     # Step 1: undo "chunked" transfer encoding
     # (frames of the form <hex size>\r\n<payload>\r\n).
     if "chunked" in encodings:
         reassembled = b""
         while s:
             hex_len, _, rest = s.partition(b"\r\n")
             try:
                 n = int(hex_len, 16)
             except ValueError:
                 # Not a valid chunk. Ignore
                 break
             payload = rest[:n]
             if rest[n:n + 2] != b"\r\n":
                 # Invalid chunk. Ignore
                 break
             s = rest[n + 2:]
             reassembled += payload
         # Use the reassembled body only when the whole input parsed;
         # otherwise keep the original bytes.
         if not s:
             s = reassembled
     # Step 2: undo the content encoding, best-effort.
     try:
         if "deflate" in encodings:
             import zlib
             s = zlib.decompress(s)
         elif "gzip" in encodings:
             s = gzip_decompress(s)
         elif "compress" in encodings:
             import lzw
             s = lzw.decompress(s)
         elif "br" in encodings:
             if not _is_brotli_available:
                 log_loading.info(
                     "Can't import brotli. brotli decompression "
                     "will be ignored !")
             else:
                 s = brotli.decompress(s)
     except Exception:
         # Decompression failed — probably incomplete data; keep body.
         pass
     return s