Example 1
def read_response_body(self, request, response):
    expected_size = http1.expected_http_body_size(request, response)
    return http1.read_body(
        self.server_conn.rfile,
        expected_size,
        self.config.options.body_size_limit
    )
Example 2
def read_request_body(self, request):
    expected_size = http1.expected_http_body_size(request)
    return http1.read_body(
        self.client_conn.rfile,
        expected_size,
        self.config.options.body_size_limit
    )
Example 3
def read_http_body(io, expected_size):
    """
    Read a (possibly malformed) HTTP body.

    :rtype: (body: bytes, is_malformed: bool)
    """
    body_start = io.tell()
    try:
        content = b"".join(http1.read_body(io, expected_size, None))
        if io.read():  # leftover?
            raise HttpException()
        return content, False
    except HttpException:
        io.seek(body_start)
        return io.read(), True
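A minimal sketch of driving this helper with an in-memory stream, assuming the function above is in scope along with its netlib-style dependencies (netlib.http.http1 and netlib.exceptions.HttpException, as imported in Example 9):

import io

# Well-formed case: exactly 4 bytes are expected and nothing is left over.
stream = io.BytesIO(b"data")
body, malformed = read_http_body(stream, 4)
assert body == b"data" and malformed is False

# Malformed case: bytes remain after the expected size, so the helper
# rewinds and returns the whole remainder flagged as malformed.
stream = io.BytesIO(b"data plus trailing garbage")
body, malformed = read_http_body(stream, 4)
assert body == b"data plus trailing garbage" and malformed is True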
Example 4
def read_body(io, expected_size):
    """
    Read a (possibly malformed) HTTP body.
    Returns:
        A (body: bytes, is_malformed: bool) tuple.
    """
    body_start = io.tell()
    try:
        content = b"".join(http1.read_body(io, expected_size, None))
        if io.read():  # leftover?
            raise HttpException()
        return content, False
    except HttpException:
        io.seek(body_start)
        return io.read(), True
Example 5
    def test_stream_chunked(self):
        connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        connection.connect(("127.0.0.1", self.proxy.port))
        fconn = connection.makefile("rb")
        spec = '200:h"Transfer-Encoding"="chunked":r:b"4\\r\\nthis\\r\\n11\\r\\nisatest__reachhex\\r\\n0\\r\\n\\r\\n"'
        connection.send(b"GET %s/p/%s HTTP/1.1\r\n" %
                        (self.server.urlbase.encode(), spec.encode()))
        connection.send(b"\r\n")

        resp = http1.read_response_head(fconn)

        assert resp.headers["Transfer-Encoding"] == 'chunked'
        assert resp.status_code == 200

        chunks = list(http1.read_body(fconn, None))
        assert chunks == [b"this", b"isatest__reachhex"]

        connection.close()
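In the chunked body above, chunk sizes are hexadecimal, as HTTP chunked transfer encoding requires: 4 bytes for b"this", 0x11 = 17 bytes for b"isatest__reachhex", and a zero-sized chunk terminating the stream, which is why read_body yields exactly those two chunks.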
Example 6
    def test_stream_chunked(self):
        connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        connection.connect(("127.0.0.1", self.proxy.port))
        fconn = connection.makefile()
        spec = '200:h"Transfer-Encoding"="chunked":r:b"4\\r\\nthis\\r\\n11\\r\\nisatest__reachhex\\r\\n0\\r\\n\\r\\n"'
        connection.send(
            "GET %s/p/%s HTTP/1.1\r\n" % (self.server.urlbase, spec))
        connection.send("\r\n")

        resp = http1.read_response_head(fconn)

        assert resp.headers["Transfer-Encoding"] == 'chunked'
        assert resp.status_code == 200

        chunks = list(http1.read_body(fconn, None))
        assert chunks == ["this", "isatest__reachhex"]

        connection.close()
Example 7
def read_response_body(self, request, response):
    expected_size = http1.expected_http_body_size(request, response)
    return http1.read_body(self.server_conn.rfile, expected_size, self.config.body_size_limit)
Example 8
def read_request_body(self, request):
    expected_size = http1.expected_http_body_size(request)
    return http1.read_body(self.client_conn.rfile, expected_size, self.config.body_size_limit)
Example 9
def pcap2mitm(pcapfile, mitmfile, tlsmaster=None, stream=False):
    try:
        from mitmproxy import models
        from mitmproxy.flow import FlowWriter
        from netlib.exceptions import HttpException
        from netlib.http import http1
    except ImportError:
        log.warning(
            "In order to use this utility it is required to have the "
            "mitmproxy tool installed (`pip install httpreplay[mitmproxy]`)")
        return False

    if tlsmaster:
        tlsmaster = read_tlsmaster(tlsmaster)
    else:
        tlsmaster = {}

    handlers = {
        443: lambda: https_handler(tlsmaster),
        4443: lambda: https_handler(tlsmaster),
        "generic": http_handler,
    }

    reader = PcapReader(pcapfile)
    reader.tcp = TCPPacketStreamer(reader, handlers)
    writer = FlowWriter(mitmfile)

    l = reader.process()
    if not stream:
        # Sort the http/https requests and responses by their timestamp.
        l = sorted(l, key=lambda x: x[1])

    for s, ts, protocol, sent, recv in l:
        if protocol not in ("http", "https"):
            continue

        srcip, srcport, dstip, dstport = s

        client_conn = models.ClientConnection.make_dummy((srcip, srcport))
        client_conn.timestamp_start = ts

        server_conn = models.ServerConnection.make_dummy((dstip, dstport))
        server_conn.timestamp_start = ts

        flow = models.HTTPFlow(client_conn, server_conn)

        try:
            sent = io.BytesIO(sent.raw)
            request = http1.read_request_head(sent)
            body_size = http1.expected_http_body_size(request)
            request.data.content = b"".join(
                http1.read_body(sent, body_size, None))
        except HttpException as e:
            log.warning("Error parsing HTTP request: %s", e)
            continue

        flow.request = models.HTTPRequest.wrap(request)
        flow.request.timestamp_start = client_conn.timestamp_start

        flow.request.host = dstip
        flow.request.port = dstport
        flow.request.scheme = protocol

        try:
            recv = io.BytesIO(recv.raw)
            response = http1.read_response_head(recv)
            body_size = http1.expected_http_body_size(request, response)
            response.data.content = b"".join(
                http1.read_body(recv, body_size, None))
        except HttpException as e:
            log.warning("Error parsing HTTP response: %s", e)
            # Fall through (?)

        flow.response = models.HTTPResponse.wrap(response)
        flow.response.timestamp_start = server_conn.timestamp_start

        flow.id = str(
            uuid.UUID(bytes=hashlib.md5(
                b"%d%d%s%s" %
                (client_conn.timestamp_start, server_conn.timestamp_start,
                 request.data.content, response.data.content)).digest()))

        writer.add(flow)
    return True
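For orientation, one plausible way to invoke this converter; the import path and the exact argument types (a pcap path, a writable binary file object for the mitm output, and an optional path to a TLS master-secrets file) are assumptions read off the snippet rather than a documented API:

from httpreplay.utils import pcap2mitm  # assumed module path

with open("flows.mitm", "wb") as mitmfile:
    ok = pcap2mitm("traffic.pcap", mitmfile, tlsmaster=None, stream=False)
    if not ok:
        print("mitmproxy support is not installed")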