Example No. 1
def parse_body(f, headers):
    """Return HTTP body parsed from a file object, given HTTP header dict."""
    if headers.get('transfer-encoding', '').lower() == 'chunked':
        l = []
        found_end = False
        while 1:
            try:
                sz = f.readline().split(None, 1)[0]
            except IndexError:
                raise dpkt.UnpackError('missing chunk size')
            n = int(sz, 16)
            if n == 0:
                found_end = True
            buf = f.read(n)
            if f.readline().strip():
                break
            if n and len(buf) == n:
                l.append(buf)
            else:
                break
        if not found_end:
            raise dpkt.NeedData('premature end of chunked body')
        body = ''.join(l)
    elif 'content-length' in headers:
        n = int(headers['content-length'])
        body = f.read(n)
        if len(body) != n:
            raise dpkt.NeedData('short body (missing %d bytes)' %
                                (n - len(body)))
    elif 'content-type' in headers:
        body = f.read()
    else:
        # XXX - need to handle HTTP/0.9
        body = ''
    return body
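The chunked branch can be exercised directly with an in-memory file object. A minimal sketch, assuming the parse_body above is in scope, dpkt is importable for the error paths, and using made-up sample data (text, as the str join implies):

import io

# Hypothetical chunked payload: two chunks, then the terminating zero-size chunk.
chunked = "5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
body = parse_body(io.StringIO(chunked), {'transfer-encoding': 'chunked'})
assert body == "hello world"

# With a content-length header the body is read verbatim.
body = parse_body(io.StringIO("abc"), {'content-length': '3'})
assert body == "abc"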
Example No. 2
def parse_body(f, version, headers):
    """Return HTTP body parsed from a file object, given HTTP header dict."""
    if headers.get('transfer-encoding', '').lower() == 'chunked':
        l = []
        found_end = False
        while 1:
            try:
                sz = f.readline().split(None, 1)[0]
            except IndexError:
                raise dpkt.UnpackError('missing chunk size')
            n = parse_length(sz, 16)
            if n == 0:  # may happen if sz is invalid
                found_end = True
            buf = f.read(n)
            if f.readline().strip():
                break
            if n and len(buf) == n:
                l.append(buf)
            else:
                break
        if settings.strict_http_parse_body and not found_end:
            raise dpkt.NeedData('premature end of chunked body')
        body = ''.join(l)
    elif 'content-length' in headers:
        # Ethan K B: Have observed malformed 0,0 content lengths
        n = parse_length(headers['content-length'])
        body = f.read(n)
        if len(body) != n:
            logging.warning('HTTP content-length mismatch: expected %d, got %d', n,
                            len(body))
            if settings.strict_http_parse_body:
                raise dpkt.NeedData('short body (missing %d bytes)' % (n - len(body)))
    else:
        # XXX - need to handle HTTP/0.9
        # BTW, this function is not called if status code is 204 or 304
        if version == '1.0':
            # we can assume that there are no further
            # responses on this stream, since 1.0 doesn't
            # support keepalive
            body = f.read()
        elif (version == '1.1' and
              headers.get('connection', None) == 'close'):
            # sender has said they won't send anything else.
            body = f.read()
        # there's also the case where other end sends connection: close,
        # but we don't have the architecture to handle that.
        else:
            # we don't really know what to do
            #print 'returning body as empty string:', version, headers
            body = ''
    return body
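A small usage sketch for this version-aware variant, assuming it lives in a module that also defines parse_length and settings (as the code implies); the HTTP/1.0 branch exercised below touches neither helper:

import io

# HTTP/1.0 response with no content-length: the body runs to end of stream,
# since HTTP/1.0 has no keepalive.
body = parse_body(io.StringIO("hello world"), '1.0', {})
assert body == "hello world"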
Example No. 3
def unpack_name(buf, off):
    name = []
    name_length = 0
    saved_off = 0
    start_off = off
    while True:
        if off >= len(buf):
            raise dpkt.NeedData()
        n = ord(buf[off])
        if n == 0:
            off += 1
            break
        elif (n & 0xc0) == 0xc0:
            ptr = struct.unpack('>H', buf[off:off + 2])[0] & 0x3fff
            if ptr >= start_off:
                raise dpkt.UnpackError('Invalid label compression pointer')
            off += 2
            if not saved_off:
                saved_off = off
            start_off = off = ptr
        elif (n & 0xc0) == 0x00:
            off += 1
            name.append(buf[off:off + n])
            name_length += n + 1
            if name_length > 255:
                raise dpkt.UnpackError('name longer than 255 bytes')
            off += n
        else:
            raise dpkt.UnpackError('Invalid label length %02x' % n)
    if not saved_off:
        saved_off = off
    return '.'.join(name), saved_off
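Under Python 2 str semantics (which the bare ord() and '.'.join() imply), a hypothetical buffer containing one plain name and one name that ends in a compression pointer would unpack as follows:

# Hypothetical DNS name data: 'www.example' at offset 0, then the label 'ftp'
# followed by a compression pointer (0xc000) back to offset 0.
buf = '\x03www\x07example\x00' + '\x03ftp\xc0\x00'
print(unpack_name(buf, 0))    # ('www.example', 13)
print(unpack_name(buf, 13))   # ('ftp.www.example', 19)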
Example No. 4
    def unpack(self, buf):
        dpkt.Packet.unpack(self, buf)
        n = self.length - self.__hdr_len__
        if n > len(self.data):
            raise dpkt.NeedData('short message (missing %d bytes)' %
                                (n - len(self.data)))
        self.msg = self.data[:n]
        self.data = self.data[n:]
Example No. 5
    def parse_body(self, file, headers, fix_incomplete=False):
        """Return HTTP body parsed from a file object, given HTTP header dict

        Args:
            file: BytesIO object
            headers: HTTP request headers as dict
            fix_incomplete: if True, rewrite the content-length header to the
                number of bytes actually read instead of raising NeedData

        Returns:
            bytes: body"""

        if headers.get('transfer-encoding', '').lower() == 'chunked':
            buffer_list = []
            found_end = False
            while True:
                try:
                    size = file.readline().split(None, 1)[0]
                except IndexError:
                    raise dpkt.UnpackError('missing chunk size')
                n = int(size, 16)
                if n == 0:
                    found_end = True
                buffer = file.read(n)
                if file.readline().strip():
                    break
                if n and len(buffer) == n:
                    buffer_list.append(buffer)
                else:
                    break
            if not found_end:
                raise dpkt.NeedData('premature end of chunked body')
            body = b''.join(buffer_list)
        elif 'content-length' in headers:
            n = int(headers['content-length'])
            body = file.read(n)
            if fix_incomplete:
                headers['content-length'] = len(body)
            elif len(body) != n:
                raise dpkt.NeedData('short body (missing %d bytes)' %
                                    (n - len(body)))
        elif 'content-type' in headers:
            body = file.read()
        else:
            # XXX - need to handle HTTP/0.9
            body = b''
        return body
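A minimal usage sketch for this bytes-based variant; parser stands in for whatever object defines the method (hypothetical), and the payloads are made up:

import io

chunked = io.BytesIO(b"4\r\nwiki\r\n5\r\npedia\r\n0\r\n\r\n")
body = parser.parse_body(chunked, {'transfer-encoding': 'chunked'})
assert body == b"wikipedia"

# fix_incomplete=True patches the header instead of raising NeedData on a short read.
headers = {'content-length': '10'}
body = parser.parse_body(io.BytesIO(b"short"), headers, fix_incomplete=True)
assert body == b"short" and headers['content-length'] == 5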
Example No. 6
    def unpack(self, buf):
        """Unpacks a bytes object into component attributes.
        This function is called when an instance of this class is created
        by passing a bytes object to the constructor
        """
        super().unpack(buf)  # unpacks _hdr

        if self.hdr_type != APv6Udp.HDR_TYPE:
            raise dpkt.UnpackError(
                "Unpacking compressed UDP, but encountered wrong type value")

        if self.hdr_co == 0:
            if len(self.data) < 2:
                raise dpkt.NeedData("for UDP checksum")
            self.chksum = struct.unpack("!H", self.data[0:2])[0]
            self.data = self.data[2:]

        p = self.hdr_ports
        if p == APv6Udp.HDR_PORTS_SRC_INLN_DST_INLN:
            if len(self.data) < 4:
                raise dpkt.NeedData("for UDP ports")
            self.src_port = struct.unpack("!H", self.data[0:2])[0]
            self.dst_port = struct.unpack("!H", self.data[2:4])[0]
            self.data = self.data[4:]
        elif p == APv6Udp.HDR_PORTS_SRC_INLN_DST_F0XX:
            if len(self.data) < 3:
                raise dpkt.NeedData("for UDP ports")
            self.src_port = struct.unpack("!H", self.data[0:2])[0]
            self.dst_port = 0xf000 | self.data[2]
            self.data = self.data[3:]
        elif p == APv6Udp.HDR_PORTS_SRC_F0XX_DST_INLN:
            if len(self.data) < 3:
                raise dpkt.NeedData("for UDP ports")
            self.src_port = 0xf000 | self.data[0]
            self.dst_port = struct.unpack("!H", self.data[1:3])[0]
            self.data = self.data[3:]
        elif p == APv6Udp.HDR_PORTS_SRC_F0BX_DST_F0BX:
            if len(self.data) < 1:
                raise dpkt.NeedData("for UDP ports")
            d = self.data[0]
            self.src_port = 0xf0b0 | ((d >> 4) & 0b1111)
            self.dst_port = 0xf0b0 | (d & 0b1111)
            self.data = self.data[1:]
Example No. 7
    def unpack(self, buf):
        """Unpacks a bytes object into component attributes.
        This function is called when an instance of this class is created
        by passing a bytes object to the constructor
        """
        super().unpack(buf)  # unpacks _iphc

        # Hops is in the byte following the IPHC field
        if self._has_hops_field():
            if len(self.data) < 1:
                raise dpkt.NeedData("for hops")
            self.hops = self.data[0]
            self.data = self.data[1:]

        # Hops is encoded in the IPHC HLIM field
        else:
            if self.iphc_hlim == 0b01:
                self.hops = 1
            if self.iphc_hlim == 0b10:
                self.hops = 64
            if self.iphc_hlim == 0b11:
                self.hops = 255

        if self._has_src_field():
            if len(self.data) < 16:
                raise dpkt.NeedData("for src")
            self.src = self.data[0:16]
            self.data = self.data[16:]

        if self._has_dst_field():
            if len(self.data) < 16:
                raise dpkt.NeedData("for dst")
            self.dst = self.data[0:16]
            self.data = self.data[16:]

        # Unpack the payload for known frame types
        if (self.iphc_prefix == APv6Frame.IPHC_PREFIX and len(self.data) > 1):
            # TODO: check for uncompressed UDP, too
            # If the compressed next-header indicates compressed-UDP
            if self.iphc_nhc == 1 and self.data[0] & 0b11111000 == 0b11110000:
                from .trn_udp import APv6Udp
                self.data = APv6Udp(self.data)
Example No. 8
    def unpack(self, buf):
        dpkt.Packet.unpack(self, buf)
        header_length = self.__hdr_len__
        self.data = buf[header_length:header_length + self.length]
        # make sure buffer was long enough
        if len(self.data) != self.length:
            raise dpkt.NeedData('TLSRecord data was too short.')
        # assume compressed and encrypted when it's been parsed from
        # raw data
        self.compressed = True
        self.encrypted = True
Example No. 9
    def unpack(self, buf):
        dpkt.Packet.unpack(self, buf)
        n = self.len - 4
        if n > len(self.data):
            raise dpkt.NeedData('not enough data')
        self.msg, self.data = self.data[:n], self.data[n:]
        try:
            p = self._msgsw[self.msgid](self.msg)
            setattr(self, p.__class__.__name__.lower(), p)
        except (KeyError, dpkt.UnpackError):
            pass
Example No. 10
def parse_body(f, headers):
    """Return HTTP body parsed from a file object, given HTTP header dict."""
    if headers.get('transfer-encoding', '').lower() == 'chunked':
        l = []
        found_end = False
        while 1:
            try:
                sz = f.readline().split(None, 1)[0]
            except IndexError:
                raise dpkt.UnpackError('missing chunk size')
            n = int(sz, 16)
            if n == 0:
                found_end = True
            buf = f.read(n)
            if f.readline().strip():
                break
            if n and len(buf) == n:
                l.append(buf)
            else:
                break
        if settings.strict_http_parse_body and not found_end:
            raise dpkt.NeedData('premature end of chunked body')
        body = ''.join(l)
    elif 'content-length' in headers:
        # Have observed malformed 0,0 content lengths
        if headers['content-length'] == '0,0':
            n = 0
        else:
            n = int(headers['content-length'])
        body = f.read(n)
        if len(body) != n:
            logging.warning('HTTP content-length mismatch: expected %d, got %d',
                            n, len(body))
            if settings.strict_http_parse_body:
                raise dpkt.NeedData('short body (missing %d bytes)' %
                                    (n - len(body)))
    else:
        # XXX - need to handle HTTP/0.9
        body = ''
    return body
Example No. 11
    def unpack(self, buf):
        super(Gzip, self).unpack(buf)
        if self.flags & GZIP_FEXTRA:
            if len(self.data) < 2:
                raise dpkt.NeedData('Gzip extra')
            n = struct.unpack('<H', self.data[:2])[0]
            if len(self.data) < 2 + n:
                raise dpkt.NeedData('Gzip extra')
            self.extra = GzipExtra(self.data[2:2 + n])
            self.data = self.data[2 + n:]
        if self.flags & GZIP_FNAME:
            n = self.data.find('\x00')
            if n == -1:
                raise dpkt.NeedData('Gzip end of file name not found')
            self.filename = self.data[:n]
            self.data = self.data[n + 1:]
        if self.flags & GZIP_FCOMMENT:
            n = self.data.find('\x00')
            if n == -1:
                raise dpkt.NeedData('Gzip end of comment not found')
            self.comment = self.data[:n]
            self.data = self.data[n + 1:]
        if self.flags & GZIP_FENCRYPT:
            if len(self.data) < GZIP_FENCRYPT_LEN:
                raise dpkt.NeedData('Gzip encrypt')
            self.data = self.data[GZIP_FENCRYPT_LEN:]  # XXX - skip
        if self.flags & GZIP_FHCRC:
            if len(self.data) < 2:
                raise dpkt.NeedData('Gzip hcrc')
            self.data = self.data[2:]  # XXX - skip
Example No. 12
def parse_headers(f):
    """Return dict of HTTP headers parsed from a file object."""
    d = {}
    while 1:
        line = f.readline()
        if not line:
            raise dpkt.NeedData('premature end of headers')
        line = line.strip()
        if not line:
            break
        l = line.split(None, 1)
        if not l[0].endswith(':'):
            raise dpkt.UnpackError('invalid header: %r' % line)
        k = l[0][:-1].lower()
        d[k] = len(l) != 1 and l[1] or ''
    return d
Example No. 13
def parse_headers(f):
    """Return dict of HTTP headers parsed from a file object."""
    d = {}
    while 1:
        line = f.readline()
        if not line:
            raise dpkt.NeedData('premature end of headers')
        line = line.strip()
        if not line:
            break
        l = line.split(':', 1)
        if len(l[0].split()) != 1:
            raise dpkt.UnpackError('invalid header: %r' % line)
        k = l[0].lower()
        v = len(l) != 1 and l[1].lstrip() or ''
        if k in d:
            if not type(d[k]) is list:
                d[k] = [d[k]]
            d[k].append(v)
        else:
            d[k] = v
    return d
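A short sketch of how repeated header fields fold into a list, assuming this parse_headers is in scope and given a made-up header block in a text-mode file object:

import io

raw = ("Host: example.com\r\n"
       "Set-Cookie: a=1\r\n"
       "Set-Cookie: b=2\r\n"
       "\r\n")
headers = parse_headers(io.StringIO(raw))
# {'host': 'example.com', 'set-cookie': ['a=1', 'b=2']}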
Example No. 14
def _read_chunked(rfile):
    """
    Read a HTTP body with chunked transfer encoding.

    (adapted from mitmproxy's netlib.http.http1)
    """
    while True:
        line = rfile.readline(128)
        if line == b"":
            raise dpkt.NeedData("premature end of chunked body")
        if line != b"\r\n" and line != b"\n":
            try:
                length = int(line, 16)
            except ValueError:
                raise dpkt.UnpackError("Invalid chunked encoding length: %s" %
                                       line)
            chunk = rfile.read(length)
            suffix = rfile.readline(5)
            if suffix != b"\r\n":
                raise dpkt.UnpackError("Malformed chunked body")
            if length == 0:
                return
            yield chunk
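Because _read_chunked is a generator, the caller joins the yielded chunks; a minimal sketch with a hypothetical two-chunk body:

import io

rfile = io.BytesIO(b"3\r\nfoo\r\n3\r\nbar\r\n0\r\n\r\n")
body = b"".join(_read_chunked(rfile))
assert body == b"foobar"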
Example No. 15
    def unpack(self, buf):
        """Unpacks a bytes object into component attributes.
        This function is called when an instance of this class is created
        by passing a bytes object to the constructor
        """
        super().unpack(buf)

        # validate the PID
        if not self.is_heymac():
            raise ValueError("Invalid PID Protocol value")

        # The Fctl field can be every bit-combination
        # so there's no illegal value; no way to validate.
        # All fields after Fctl are optional (defined as '0s')
        # so we conditionally pull them from the .data bytearray

        if self._has_netid_field():
            if len(self.data) < 2:
                raise dpkt.NeedData("for netid")
            self.netid = self.data[0:2]
            self.data = self.data[2:]

        if self._has_daddr_field():
            sz = self._sizeof_addr_field()
            if len(self.data) < sz:
                raise dpkt.NeedData("for daddr")
            self.daddr = self.data[0:sz]
            self.data = self.data[sz:]

        if self._has_ie_field():
            sz_hie, sz_bie = self._sizeof_ie_fields()
            if len(self.data) < sz_hie + sz_bie:
                raise dpkt.NeedData("for IEs")
            self.hie = self.data[0:sz_hie]
            self.bie = self.data[sz_hie:sz_hie + sz_bie]
            self.data = self.data[sz_hie + sz_bie:]

        if self._has_saddr_field():
            sz = self._sizeof_addr_field()
            if len(self.data) < sz:
                raise dpkt.NeedData("for saddr")
            self.saddr = self.data[0:sz]
            self.data = self.data[sz:]

        # The payload comes after SrcAddr, but its size is unknown.
        # So we parse any multihop data from the tail of the packet, backward.
        if self._has_multihop_fields():
            sz = 1 + self._sizeof_addr_field()
            if len(self.data) < sz:
                raise dpkt.NeedData("for hops/txaddr")
            self.txaddr = self.data[-sz:]
            self.data = self.data[:-sz]
            self.hops = self.data[-1:]
            self.data = self.data[:-1]

        # At this point self.data contains the payload, which may be empty.
        # The first byte of the payload denotes its type.
        # Create an instance of its type and keep it as self.payld
        if self.data:
            if self.is_data_mac_layer():
                try:
                    self.payld = HeyMacCmd.get_instance(self.data)
                except Exception:
                    logging.info("invalid MAC frame: %s", self.data)
                    raise ValueError()
            elif self.is_data_net_layer():
                try:
                    self.payld = APv6Frame(self.data)
                except Exception:
                    logging.info("invalid APv6 pkt: %s", self.data)
                    raise ValueError()