Example #1
class Request:
    parser = None
    _body = None

    def __init__(self, data):
        self.parser = HttpParser()
        self.parser.execute(data, len(data))

        self.method = self.parser.get_method()
        self.path = self.parser.get_path()
        self.headers = self.parser.get_headers()
        self.querystring = parse_qs(unquote(self.parser.get_query_string()),
                                    keep_blank_values=True)
        if self.querystring:
            self.path += "?{}".format(self.parser.get_query_string())

    def add_data(self, data):
        self.parser.execute(data, len(data))

    @property
    def body(self):
        if self._body is None:
            self._body = decode_from_bytes(self.parser.recv_body())
        return self._body

    def __str__(self):
        return "{} - {} - {}".format(self.method, self.path, self.headers)
Example #2
def proxy(data):
    recved = len(data)

    idx = data.find("\r\n")
    if idx <= 0:
        return

    line, rest = data[:idx], data[idx:]
    if line.startswith("CONNECT"):
        parts = line.split(None)
        netloc = parts[1]
        remote = parse_address(netloc, 80)

        reply_msg = "%s 200 OK\r\n\r\n" % parts[2]
        return {"remote": remote, 
                "reply": reply_msg,
                "data": ""}


    parser = HttpParser()
    parsed = parser.execute(data, recved)
    if parsed != recved:
        return {'close': 'HTTP/1.0 502 Gateway Error\r\n\r\nError parsing request'}

    if not parser.get_url():
        return

    parsed_url = urlparse.urlparse(parser.get_url())

    is_ssl = parsed_url.scheme == "https"
    remote = parse_address(parsed_url.netloc, 80)

    return {"remote": remote, 
            "ssl": is_ssl}
Example #3
def handle_batch_client(sock):
    recvbuf = ""
    while True:
        rds, _, _ = select.select([sock], [], [], 60 * 5)
        if not rds:
            break

        data = sock.recv(1024)
        if not data:
            break
        recvbuf += data

        pos = recvbuf.find("\r\n\r\n")
        if pos == -1:
            continue
        parser = HttpParser()
        nparsed = parser.execute(recvbuf, pos + 4)
        if nparsed != pos + 4:
            logging.debug("pos:%d, nparsed:%d, recvbuf:%r", pos, nparsed, recvbuf)
        assert nparsed == pos + 4
        assert parser.is_headers_complete()
        headers = parser.get_headers()
        content_length = int(headers["Content-Length"]) if "Content-Length" in headers else 0
        logging.debug("content length:%d", content_length)
        recvbuf = recvbuf[pos + 4 :]
        preread = recvbuf[:content_length]
        recvbuf = recvbuf[content_length:]
        keepalived = handle_request(sock, parser, preread)
        if not keepalived:
            break

    logging.debug("close client")
    sock.close()
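
A standalone sketch of the framing trick used above: parse only up to the blank line that ends the headers, then slice any pre-read body off the buffer using Content-Length. A literal pipelined buffer stands in for the socket, and the header lookup is case-insensitive because key casing varies between parser builds:

try:
    from http_parser.parser import HttpParser
except ImportError:
    from http_parser.pyparser import HttpParser

recvbuf = (b"POST /upload HTTP/1.1\r\n"
           b"Host: example.com\r\n"
           b"Content-Length: 5\r\n\r\n"
           b"hello"
           b"GET /next HTTP/1.1\r\n")                  # start of the next pipelined request
pos = recvbuf.find(b"\r\n\r\n")
parser = HttpParser()
nparsed = parser.execute(recvbuf[:pos + 4], pos + 4)   # feed the header block only
assert nparsed == pos + 4 and parser.is_headers_complete()
content_length = int(next((v for k, v in parser.get_headers().items()
                           if k.lower() == "content-length"), 0))
recvbuf = recvbuf[pos + 4:]
preread, recvbuf = recvbuf[:content_length], recvbuf[content_length:]
print(preread)   # b'hello' -- body bytes that arrived together with the headers
print(recvbuf)   # the next request stays in the buffer for the following round
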
Example #4
    def __init__(self):
        # Initiates the HttpParser object.
        self.http_parser = HttpParser()

        # Creates the TLS flag.
        self.using_tls = False

        # Initiating our HTTP transport with the emulated client.
        self.HTTP_Protocol = HTTP(using_ssl=False)

        # Setting our SSL context for the server.
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_context.load_cert_chain("ssl/server.crt", "ssl/server.key")

        # Opening our HTTPS transport.
        self.HTTPS_Protocol = asyncio.sslproto.SSLProtocol(
            loop=asyncio.get_running_loop(),
            app_protocol=HTTP(using_ssl=True),
            sslcontext=ssl_context,
            waiter=None,
            server_side=True,
        )
Example #5
    def run(self):
        while self.running:
            data, addr = self.listener.recvfrom(4096)
            http_parser = HttpParser()
            http_parser.execute(data, len(data))
            headers = http_parser.get_headers()

            try:
                if headers['NTS'] == 'ssdp:alive' and headers[
                        'NT'] == 'urn:zenterio-net:service:X-CTC_RemotePairing:1':
                    stb = STB(uuid=headers['USN'][5:41],
                              location=headers['LOCATION'],
                              nt=headers['NT'])
                    self.mutex.acquire(1)
                    for x in self.stbs:
                        if x.uuid == stb.uuid:
                            break
                    else:
                        self.stbs.append(stb)
                        log.info('-------------------------------------------')
                        log.info("New STB detected!")
                        log.info("UUID: " + stb.uuid)
                        log.info("Location: " + stb.location)
                        log.info("NT: " + stb.nt)
                    self.mutex.release()
            except KeyError:
                # Ignore SSDP notifications that lack the headers checked above.
                pass
Example #6
def post_sync(sock, masterip, masterport):
    obj = {"last_fileno": haystack.haystack_last_fileno}
    body = json.dumps(obj)
    sock.send("POST /sync HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (masterip, masterport))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        # Ugly: read one byte at a time so we do not consume the start of the
        # next HTTP message on this keep-alive socket.
        data = sock.recv(1)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
Example #7
def post_report(sock):
    st = os.statvfs(haystack_path)
    available_size = st.f_bavail * st.f_frsize
    obj = {}
    obj["listenip"] = listenip
    obj["listenport"] = listenport
    obj["disk_available_size"] = available_size
    obj["master"] = master
    obj["groupid"] = groupid
    obj["last_fileno"] = haystack.haystack_last_fileno
    body = json.dumps(obj)
    sock.send("POST /report HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (track.ip, track.port))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
Example #8
    async def handle_url(self, request: HttpParser) -> HttpResponse:
        method = request.get_method().upper()
        if method not in ('GET', 'HEAD'):
            return HttpResponse(405)
        path = request.get_path()
        if path.endswith('/'):
            path += 'index.html'
        relative = os.path.relpath(url2pathname(path), '/')
        filename = os.path.join(self.root_dir, relative)
        try:
            byte_range = None
            if 'Range' in request.get_headers():
                # Not RFC 7233 compliant
                range_match = re.match(r'bytes=(\d+)-(\d+)',
                                       request.get_headers()['Range'])
                if not range_match:
                    return HttpResponse(400, 'Invalid Range header')
                start, end = map(int, range_match.groups())
                # Python range is exclusive, HTTP Range is inclusive
                byte_range = range(start, end + 1)
            length = 0
            async with aiofiles.open(filename, 'rb') as f:
                if method == 'GET':
                    if byte_range is not None:
                        await f.seek(byte_range.start)
                        data = await f.read(len(byte_range))
                        byte_range = range(byte_range.start,
                                           byte_range.start + len(data))
                        await f.seek(0, os.SEEK_END)
                        length = await f.tell()
                        response = HttpResponse(206, data)
                    else:
                        data = await f.read()
                        response = HttpResponse(200, data)
                else:
                    # Used instead of os.stat to ensure the file can be accessed
                    response = HttpResponse(200)
                    await f.seek(0, os.SEEK_END)
                    length = await f.tell()
                    if byte_range is not None:
                        byte_range = range(byte_range.start,
                                           min(length, byte_range.stop))
                    response.headers['Content-Length'] = length
            if byte_range is not None:
                response.headers['Content-Range'] = 'bytes %d-%d/%d' % (
                    byte_range.start, byte_range.stop - 1, length)

        except FileNotFoundError:
            return HttpResponse(404,
                                'This is not the file you are looking for')
        except PermissionError:
            return HttpResponse(403)
        _, extension = os.path.splitext(filename)
        extension = extension[1:]
        if extension.lower() in self.mime_types:
            response.headers['Content-Type'] = self.mime_types[
                extension.lower()]
        response.headers['Last-Modified'] = formatdate(
            os.stat(filename).st_mtime, False, True)
        return response
Example #9
def iter_warc_records(warc_file, domain_whitelist=None, only_homepages=None):
    """ Selective iterator over records in a WARC file """

    for _, record in enumerate(warc_file):

        if not record.url:
            continue

        if record['Content-Type'] != 'application/http; msgtype=response':
            continue

        url = URL(record.url, check_encoding=True)

        if domain_whitelist is not None:
            if url.domain not in domain_whitelist:
                continue

        elif only_homepages:
            if url.parsed.path != "/" or url.parsed.query != "":
                continue

        payload = record.payload.read()
        parser = HttpParser()
        parser.execute(payload, len(payload))

        headers = parser.get_headers()

        if 'text/html' not in headers.get("content-type", ""):
            # print "Not HTML?", record.url, headers
            continue

        yield url, headers, parser.recv_body()
Example #10
    def saveTCP(self, index, path):
        if os.name == 'nt':
            path = path.replace('file://', '')[1:]
        else:
            path = path.replace('file://', '')

        if (index + 1) in network_sniffer.getTcpBodies():
            f = open(path, 'wb')

            try:
                p = HttpParser()
                recved = len(network_sniffer.getTcpBodies()[index + 1]['data'])
                nparsed = p.execute(
                    network_sniffer.getTcpBodies()[index + 1]['data'], recved)
                assert nparsed == recved
                f.write(p.recv_body())

                ret = 'Parsed an HTTP message; saved the HTTP payload.'
            except AssertionError:
                f.write(network_sniffer.getTcpBodies()[index + 1]['data'])

                ret = 'No HTTP message found; saved the raw TCP data.'

            f.close()

            return ret

        else:
            return 'This packet is not the last segment of the TCP stream.'
Example #11
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(("gunicorn.org", 80))
        s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print p.get_headers()
                print p.get_headers()["content-length"]
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print "".join(body)

    finally:
        s.close()
Example #12
def make_request(sock, server_name):
    """
    Given an open socket, makes a simple HTTP request, parses the response, and
    returns a dictionary containing the HTTP headers that were returned by the
    server.
    """
    p = HttpParser()

    request = ('GET / HTTP/1.0\r\n' +
               'User-Agent: pySSLScan\r\n' +
               'Host: %s\r\n\r\n' % (server_name,))
    sock.write(request.encode('ascii'))

    headers = None
    while True:
        data = sock.recv(1024)
        if not data:
            break

        recved = len(data)
        nparsed = p.execute(data, recved)
        assert nparsed == recved

        if p.is_headers_complete():
            headers = p.get_headers()
            break

    return headers
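
One way to drive make_request, assuming a standard ssl-wrapped socket (which provides the write()/recv() pair the function expects); pySSLScan's own connection setup is not part of this snippet:

import socket
import ssl

ctx = ssl.create_default_context()
with socket.create_connection(("example.com", 443)) as raw_sock:
    with ctx.wrap_socket(raw_sock, server_hostname="example.com") as tls_sock:
        headers = make_request(tls_sock, "example.com")
print(headers)
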
Example #13
    def iter_items(self, partition):
        """ Yields objects in the source's native format """

        warc_stream = self.open_warc_stream(partition["path"])

        for record in warc_stream:

            if not record.url:
                continue

            if record['Content-Type'] != 'application/http; msgtype=response':
                continue

            url = URL(record.url, check_encoding=True)

            do_parse, index_level = self.qualify_url(url)

            if not do_parse:
                continue

            payload = record.payload.read()
            parser = HttpParser()
            parser.execute(payload, len(payload))

            headers = parser.get_headers()

            if 'text/html' not in headers.get("content-type", ""):
                # print "Not HTML?", record.url, headers
                continue

            yield url, headers, "html", index_level, parser.recv_body()
Example #14
 def send_request(self, nocallback=False):
     self._connected = True
     req = (
         'GET %s HTTP/1.1',
         'Host: %s',
         # t.co will return 200 and use js/meta to redirect using the following :-(
         # 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0',
         'User-Agent: FetchTitle/1.0',
         'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.7',
         'Accept-Language: zh-cn,zh;q=0.7,en;q=0.3',
         'Accept-Charset: utf-8,gb18030;q=0.7,*;q=0.7',
         'Accept-Encoding: gzip, deflate',
         'Connection: keep-alive',
     )
     path = self.url.path or '/'
     if self.url.query:
         path += '?' + self.url.query
     req = '\r\n'.join(req) % (
         path,
         self.host,
     )
     if self._cookie:
         req += '\r\n' + self._cookie
     req += '\r\n\r\n'
     self.stream.write(req.encode())
     self.headers_done = False
     self.parser = HttpParser(decompress=True)
     if not nocallback:
         self.stream.read_until_close(
             # self.addr will have been changed when close callback is run
             partial(self.on_data, close=True, addr=self.addr),
             streaming_callback=self.on_data,
         )
Example #15
def proxy(data):
    recved = len(data)

    idx = data.find("\r\n")
    if idx <= 0:
        return

    line, rest = data[:idx], data[idx:]
    if line.startswith("CONNECT"):
        parts = line.split(None)
        netloc = parts[1]
        remote = parse_address(netloc, 80)

        reply_msg = "%s 200 OK\r\n\r\n" % parts[2]
        return {"remote": remote, "reply": reply_msg, "data": ""}

    parser = HttpParser()
    parsed = parser.execute(data, recved)
    if parsed != recved:
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n\r\nError parsing request'
        }

    if not parser.get_url():
        return

    parsed_url = urlparse.urlparse(parser.get_url())

    is_ssl = parsed_url.scheme == "https"
    remote = parse_address(parsed_url.netloc, 80)

    return {"remote": remote, "ssl": is_ssl}
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('gunicorn.org', 80))
        s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                print(p.get_method())
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print(b("").join(body))

    finally:
        s.close()
Example #17
 def __init__(self, raw):
     resp = HttpParser()
     resp.execute(raw.response, len(raw.response))
     self.headers = resp.get_headers()
     self.body = "".join(resp._body)
     self.raw = raw
     self.code = resp.get_status_code()
     self._json = None
Example #18
 def receive_buffer(self, buf):
     self.buffer += buf
     parser = HttpParser()
     recved = len(self.buffer)
     nparsed = parser.execute(self.buffer, recved)
     assert nparsed == recved
     if parser.is_message_complete():
         return (True, parser)
     return (False, parser)
Example #19
 def receive_buffer(self, buf):
     self.buffer += buf
     parser = HttpParser()
     recved = len(self.buffer)
     nparsed = parser.execute(self.buffer, recved)
     assert nparsed == recved
     if parser.is_message_complete():
         return (True, parser)
     return (False, parser)
 def __init__(self, current_key_hex, partner_key_hex):
   # self.body = []
   self.parser = HttpParser(kind=2, decompress=True)
   self.data_bytes = 0
   self.total_bytes = 0
   self.current_key_hex = current_key_hex
   self.partner_key_hex = partner_key_hex
   self.is_request = None
   self.service = None
Example #21
File: http.py  Project: bjornua/dna
    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream. 

        :attr stream: an io.RawIOBase object
        :attr kind: Int, can be 0 to parse only requests,
        1 to parse only responses, or 2 to let the
        parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream
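
The kind values described in the docstring map directly onto HttpParser itself, as other examples in this collection show (kind=0 for requests, kind=1 for responses, kind=2 to auto-detect). A small sketch of a response-only parser:

try:
    from http_parser.parser import HttpParser
except ImportError:
    from http_parser.pyparser import HttpParser

raw = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok"
p = HttpParser(kind=1)              # response-only parser
assert p.execute(raw, len(raw)) == len(raw)
print(p.get_status_code())          # 200
print(p.is_message_complete())      # True
print(p.recv_body())                # the two body bytes (exact type depends on the parser build)
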
Example #22
def parse_request(http_request, protocol, host, port):
    """
    Parse an HTTP request from Burp Suite into a dict
    TODO: cookie parsing
    """
    httpParser = HttpParser()
    httpParser.execute(http_request, len(http_request))

    header = dict(httpParser.get_headers())
    header.pop("Content-Length", None)  # drop Content-Length if present
    # cookie = header["Cookie"]
    body = httpParser.recv_body()
    method = httpParser.get_method()
    url = protocol + "://" + host + httpParser.get_path()
    query = httpParser.get_query_string()

    params = dict(urlparse.parse_qsl(query))
    data = dict(urlparse.parse_qsl(body)) if method == "POST" else {}
    try:
        jsondata = json.loads(
            body) if method == "POST" and header["Content-Type"] == "application/json" else {}
    except Exception as e:
        print "[!] " + e
        jsondata = {}
    return method, url, header, params, data, jsondata
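
A hypothetical call with a literal request of the kind Burp Suite would hand over (note that the port argument is accepted but not used in the URL the function builds):

raw_request = ("POST /login HTTP/1.1\r\n"
               "Host: target.example\r\n"
               "Content-Type: application/x-www-form-urlencoded\r\n"
               "Content-Length: 27\r\n"
               "\r\n"
               "user=admin&password=secret1")
method, url, header, params, data, jsondata = parse_request(
    raw_request, "http", "target.example", 80)
print(method)   # POST
print(url)      # http://target.example/login
print(data)     # {'user': 'admin', 'password': 'secret1'}
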
Example #23
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if 'HTTP_CONTENT_LENGTH' in env:
                    env['CONTENT_LENGTH'] = env.pop("HTTP_CONTENT_LENGTH")
                if 'HTTP_CONTENT_TYPE' in env:
                    env['CONTENT_TYPE'] = env.pop("HTTP_CONTENT_TYPE")

                env.update({
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',  # XXX incomplete
                    'wsgi.input': cStringIO.StringIO(''.join(body)),
                    'wsgi.errors': FileLikeErrorLogger(hlog),
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False,
                    'REMOTE_ADDR': addr[0],
                    'SERVER_NAME': HOSTNAME,
                    'SERVER_PORT': str(self.port),
                })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add(
                        'Date',
                        utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') is None:
                    return

            except ConnectionClosed:
                break
Example #24
    def __init__(self, handle, server):
        self.server = server
        self.request = None
        self.parser = HttpParser(kind=0)  # request only parser

        self._handle = handle
        self._handle.start_read(self._on_read)
        self._closed = False
        self._must_close = False
        self._pending_writes = 0
        self._remote_address = self._handle.getpeername()
Example #25
    def __init__(self, data):
        self.parser = HttpParser()
        self.parser.execute(data, len(data))

        self.method = self.parser.get_method()
        self.path = self.parser.get_path()
        self.headers = self.parser.get_headers()
        self.querystring = parse_qs(unquote(self.parser.get_query_string()),
                                    keep_blank_values=True)
        if self.querystring:
            self.path += "?{}".format(self.parser.get_query_string())
class Session:
  def __init__(self, current_key_hex, partner_key_hex):
    # self.body = []
    self.parser = HttpParser(kind=2, decompress=True)
    self.data_bytes = 0
    self.total_bytes = 0
    self.current_key_hex = current_key_hex
    self.partner_key_hex = partner_key_hex
    self.is_request = None
    self.service = None

  def getPartner(self):
    return sessions[self.partner_key_hex]

  def getService(self):
    if (self.is_request == False):
      return self.getPartner().getService()
    if (self.is_request is None):
      return '_unknown'
    if (self.service is None):
      self.service = getServiceForQS(self.parser.get_query_string())
    return self.service

  def eat(self, payload_string, bytes_sent):
    received_len = len(payload_string)
    self.data_bytes += received_len
    self.total_bytes += bytes_sent
    parsed_len = self.parser.execute(payload_string, received_len)
    # assert received_len == parsed_len

    # if self.parser.is_headers_complete():
    #   eprint(self.parser.get_headers())

    # if self.parser.is_partial_body():
    #   self.body.append(self.parser.recv_body())

    # if self.parser.is_message_complete():
    #   eprint("".join(self.body))

    if self.parser.get_status_code() != 0:
      self.is_request = False
      addBytesOutboundFromService(bytes_sent, self.getService())
      # eprint(payload_string)
    elif self.parser.is_message_begin():
      self.is_request = True
      addBytesInboundToService(bytes_sent, self.getService())
    else:
      addBytesUnknownboundToService(bytes_sent, self.getService())

    # if (self.parser.is_headers_complete() and not self.parser.is_message_complete()):
    #   eprint("expected: %s, so far: %d" % (self.parser.get_headers().get('CONTENT-LENGTH'), self.data_bytes))

    if self.parser.is_message_complete():
      eprint("end!")
Example #27
class EmulatedClient(object):
    """ Class for emulating the client to the server.

        Notes:
            To accomplish a proper man-in-the-middle attack with TLS capability,
            the man-in-the-middle must be the one sending the original request to
            the server. With the emulated client we are changing the typical structure:

                client <-> server

            To one that looks like so:

                client <-> mitm (server) <-> mitm (emulated client) <-> server

            Where we then reply back to the client with the response the emulated client
            retrieved from the server on behalf of the client.
    """
    def __init__(self, using_ssl):
        # Creates our HttpParser object.
        self.http_parser = HttpParser()

        # Sets flag to whether or not we are using SSL.
        self.using_ssl = using_ssl

    async def connect(self, data):
        # Parses the data coming in.
        self.http_parser.execute(data, len(data))

        host = self.http_parser.get_wsgi_environ()["HTTP_HOST"]
        uri = self.http_parser.get_wsgi_environ()["RAW_URI"]

        # Sets the proper URL client is trying to reach.
        if self.using_ssl:
            url = f"https://{host}:{uri}"
        else:
            url = uri

        # Retrieves the destination server data.
        async with aiohttp.ClientSession() as session:
            async with session.get(url, ssl=False) as response:
                status = response.status
                reason = response.reason
                headers = response.headers
                response = await response.read()

        # Re-creates the servers response.
        resp = f"HTTP/1.1 {status} {reason}\r\n".encode("latin-1")
        for header in headers:
            resp += f"{header}: {headers[header]}\r\n".encode("latin-1")
        resp += b"\r\n" + response

        # Returns the data.
        return resp
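
A hedged usage sketch (requires aiohttp and network access). The raw request is in absolute-URI form, which is what a proxying MITM server would normally receive from its client and what the RAW_URI lookup above expects on the non-TLS path:

import asyncio

async def demo():
    raw = (b"GET http://example.com/ HTTP/1.1\r\n"
           b"Host: example.com\r\n"
           b"\r\n")
    client = EmulatedClient(using_ssl=False)
    reply = await client.connect(raw)
    print(reply.split(b"\r\n", 1)[0])   # status line, e.g. b'HTTP/1.1 200 OK'

asyncio.run(demo())
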
Example #28
    def __init__(self, *args, **kwargs):
        super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs)

        self.parser = HttpParser()
        self.is_http = None
        self.body = []
        self.downstream_buffer = Buffer()

        self.well_known = ('GET', 'POST', 'OPTIONS', 'HEAD', 'PUT', 'DELETE')
        self.omit = tuple('{} {}'.format(x, y) for x in self.well_known
                          for y in (self.path, '/wsapp '))
        self.probe_len = max(len(x) for x in self.omit)
Example #29
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as we
    receive more incoming data, until we return something other than None.

    typically our response tells tproxy where to proxy the connection to, but
    may also tell it to hang up, or respond with some error message.
    """

    log = logging.getLogger("proxy")

    bytes_received = len(data)

    parser = HttpParser()
    bytes_parsed = parser.execute(data, bytes_received)

    if bytes_parsed != bytes_received:
        return {'close': 'HTTP/1.0 400 Bad Request\r\n\r\nParse error'}

    if not parser.is_headers_complete():
        if bytes_received > MAX_HEADER_LENGTH:
            return {
                'close': 'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large'
            }
        return None

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % (parser.get_url(), ))
        return _ROUTER.route(route_host, parser.get_method(),
                             parser.get_path(), parser.get_query_string())
    except Exception, err:
        log.error("error routing %r, %s" % (
            parser.get_url(),
            traceback.format_exc(),
        ))
        gevent.sleep(ERROR_DELAY)
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request'
        }
Example #30
    def proxy(self, data):
        # parse headers
        recved = len(data)
        parser = HttpParser()
        nparsed = parser.execute(data, recved)
        if nparsed != recved:
            return {"close": True}

        if not parser.is_headers_complete():
            return

        # get remote
        return self.lookup(parser)
Example #31
    def proxy(self, data):
        # parse headers
        recved = len(data)
        parser = HttpParser()
        nparsed = parser.execute(data, recved)
        if nparsed != recved:
            return {"close": True}

        if not parser.is_headers_complete():
            return

        # get remote
        return self.lookup(parser)
Example #32
def handle(connection, address, pid, queue_obj):
  import logging
  import json
  from queue import Full

  logging.basicConfig(level=logging.DEBUG)
  logger = logging.getLogger("process-%r" % (address,))
  content = []
  parser = HttpParser()

  try:
    logger.debug("Connected %r at %r", connection, address)
    while True:
      resp = connection.recv(psize)
      recved = len(resp)

      parsed = parser.execute(resp, recved)
      assert parsed == recved

      if parser.is_headers_complete():
        parser.get_headers()

      if parser.is_partial_body():
        content.append(parser.recv_body())

      if parser.is_message_complete():
        break
  except:
    logger.exception("Problem handling request: %s", sys.exc_info()[1])
    send_and_close(connection, 500)
    return

  parsed_json = {}
  data = None

  try:
    parsed_json = json.loads("".join(map(lambda s: s.decode("utf-8"), content)))
    data = parsed_json.get('data')
    url = parsed_json.get('callback')
    key = parsed_json.get('private_key')
  except:
    logger.exception("Problem decoding JSON: %s", sys.exc_info()[1])
  finally:
    if data is None:
      send_and_close(connection, 400, {"message": "JSON Parse Error"})
    elif data == 'ping':
      send_and_close(connection, 200, {"started": started, "queue": queue_obj.qsize()})
    elif data == 'stop':
      send_and_close(connection, 200, {"message": "Shutting down"})
      os.kill(pid, signal.SIGUSR1)
    elif 'trackers' in data and 'hashes' in data:
      try:
        queue_obj.put({"data": [data, url, key], "address": address}, False)
        send_and_close(connection, 200, {"message": ("in queue [%r]" % (address,))})
      except Full:
        send_and_close(connection, 429, {"message": "Server queue is full. Try another one."})
Example #33
    def request(self):
        request_buff = ""
        request_parser = HttpParser()
        while True:
            r_data = self.socket.recv(ProxyHandler.BUFF_LEN)
            request_buff += r_data
            r_size = len(r_data)
            request_parser.execute(r_data, r_size)
            if request_parser.is_message_complete():
                break

        host = request_parser.get_headers().get('Host')
        url, port = self._analyse_host_and_port(host)
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
Example #34
    def recv_http_response(self, conn):
        response = HttpParser(kind=1)
        status_code = None
        headers = None

        try:
            while True:
                chunk = conn.recv(1024)

                response.execute(chunk, len(chunk))
                if response.is_headers_complete():
                    headers = response.get_headers()
                    status_code = response.get_status_code()

                    content_length = headers.get('content-length')
                    if not content_length or int(content_length) == 0:
                        break

                if response.is_message_complete():
                    break

                if not chunk:
                    raise EOFError('Incomplete Message')

        except Exception as e:
            raise GeneralProxyError(
                'HTTP Proxy communication error ({})'.format(e))

        return status_code, headers
Example #35
    def run(self):
        self._log.info('Watch %s for new data.' % self.extension)

        while True:
            try:
                self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.client = ssl.wrap_socket(
                    self.sock,
                    ssl_version=ssl.PROTOCOL_TLSv1_2,  # pylint: disable=no-member
                    ciphers="DES-CBC3-SHA")
                self._log.debug('Connecting to %s %i' % (self.host, self.port))
                # self.client.settimeout(10)
                self.client.connect((self.host, self.port))

            except socket.error, exc:
                self._log.exception('unable to connect to %s: %s' %
                                    (self.host, exc))
                raise

            self.client.send("GET %s HTTP/1.1\r\nHost: %s\r\n%s\r\n\r\n" %
                             (self.extension, self.host, self.authhead))

            readers = [self.client]
            writers = out_of_band = []

            pending = b''

            parser = HttpParser()
            self._log.debug("+")

            while not parser.is_headers_complete():
                self._log.debug(".")
                try:
                    chunk = self.client.recv(io.DEFAULT_BUFFER_SIZE)
                except socket.error, exc:
                    err = exc.args[0]
                    self._log.debug('a recv err (%s): %s' % (err, exc))
                    break
                if not chunk:
                    self._log.exception('a No response from %s' %
                                        self.extension)
                    break
                self._log.debug('a chunk %s' % chunk)
                nreceived = len(chunk)
                nparsed = parser.execute(chunk, nreceived)
                if nparsed != nreceived:
                    self._log.exception('a nparsed %i != nreceived %i' %
                                        (nparsed, nreceived))
                    break
Example #36
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as we
    receive more incoming data, until we return something other than None.

    typically our response tells tproxy where to proxy the connection to, but
    may also tell it to hang up, or respond with some error message.
    """

    log = logging.getLogger("proxy")

    bytes_received = len(data)

    parser = HttpParser()
    bytes_parsed = parser.execute(data, bytes_received)

    if bytes_parsed != bytes_received:
        return { 'close': 
            'HTTP/1.0 400 Bad Request\r\n\r\nParse error' }

    if not parser.is_headers_complete(): 
        if bytes_received > MAX_HEADER_LENGTH:
            return { 'close': 
                'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large' }
        return None

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % ( parser.get_url(), ))
        return _ROUTER.route(
            route_host,
            parser.get_method(),
            parser.get_path(),
            parser.get_query_string())
    except Exception, err:
        log.error("error routing %r, %s" % (
            parser.get_url(), traceback.format_exc(), ))
        gevent.sleep(ERROR_DELAY)
        return { 'close': 
            'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request' }
Example #37
    def run(self):

        self.client.settimeout(0.8)
        HTTP_Request = ""
        p = HttpParser()
        header_done = False
        destination_host = ''
        global isAvailable

        while True:
            try:
                Requestline = self.client.recv(self.max)
                if Requestline != "":
                    HTTP_Request += Requestline
                else:
                    break
            except (socket.timeout, socket.error):
                #message = 'status: request timeout or socket error'
                #msglen = msg_len(message)
                #os.write(self.child_w, msglen + message)
                #print "request timeout OR socket error"
                break
            except IOError:
                print "ioerror"
                break
Example #38
 def send_request(self, nocallback=False):
   self._connected = True
   req = ('GET %s HTTP/1.1',
          'Host: %s',
          # t.co will return 200 and use js/meta to redirect using the following :-(
          # 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0',
          'User-Agent: %s' % UserAgent,
          'Accept: text/html,application/xhtml+xml;q=0.9,*/*;q=0.7',
          'Accept-Language: zh-cn,zh;q=0.7,en;q=0.3',
          'Accept-Charset: utf-8,gb18030;q=0.7,*;q=0.7',
          'Accept-Encoding: gzip, deflate',
          'Connection: keep-alive',
         )
   path = self.url.path or '/'
   if self.url.query:
     path += '?' + self.url.query
   req = '\r\n'.join(req) % (
     path, self._prepare_host(self.host),
   )
   if self._cookie:
     req += '\r\n' + self._cookie
   req += '\r\n\r\n'
   self.stream.write(req.encode())
   self.headers_done = False
   self.parser = HttpParser(decompress=True)
   if not nocallback:
     self.stream.read_until_close(
       # self.addr will have been changed when close callback is run
       partial(self.on_data, close=True, addr=self.addr),
       streaming_callback=self.on_data,
     )
Example #39
    def __init__(self, *args, **kwargs):
        super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs)

        self.parser = HttpParser()
        self.is_http = None
        self.body = []
        self.downstream_buffer = ''
Example #40
    def __init__(self, sock, parent=None):
        super(QHttpConnection, self).__init__(parent)

        self.m_sock = sock
        self.m_body = []
        self.m_parser = HttpParser()

        self.m_request = QHttpRequest(self)
        self.m_request = None
        self.m_response = QHttpResponse(self)
        self.m_response = None

        self.m_sock.readyRead.connect(self._onReadyRead)
        self.m_sock.disconnected.connect(self._onDisconnected)
        self.m_sock.bytesWritten.connect(self._onBytesWritten)

        return
Example #41
    def _handle(self, source, dest, to_backend, on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub('\r\nHost: %s\r\n'
                                            % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket') or not
                self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #42
def heartbeat(sock):
    ip, port = sock.getpeername()
    parser = HttpParser()
    sock.send("GET /ping HTTP/1.1\r\nHost: %s:%d\r\n\r\n" % (ip, port))

    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
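
A small usage sketch, assuming a storage node is listening on 127.0.0.1:8080 (a hypothetical address):

import socket

sock = socket.create_connection(("127.0.0.1", 8080))
print("alive" if heartbeat(sock) else "down")
sock.close()
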
Example #43
    def run(self):

        HTTP_Request = self.client.recv(self.max)
        p = HttpParser()
        header_done = False
        destination_host = ''

        if HTTP_Request:
            print 'Got something from ' + str(self.address) + '...'
            request_length = len(HTTP_Request)
            nparsed = p.execute(HTTP_Request, request_length)
            assert nparsed == request_length

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['Host'])
                destination_host = p.get_headers()['Host']
                header_done = True

                Relay_socket = socket.socket(socket.AF_INET,
                                             socket.SOCK_STREAM)
                Relay_socket.connect((destination_host, 80))
                Relay_socket.sendall(HTTP_Request)
                print 'Forwarding data to destination host...'

                while True:
                    HTTP_Response = Relay_socket.recv(self.max)
                    if not HTTP_Response:
                        break
                    else:
                        print 'Received data back. Forwarding to the client...'
                        self.client.sendall(HTTP_Response)

            self.client.close()
            Relay_socket.close()
Example #44
    def inject(self, dest, to_backend, data, http=False):
        modified_data = data
        if http:
            # to_backend = not to_backend
            parser = HttpParser()
            parser.execute(data, len(data))

            query = parser.get_query_string()
            url = parser.get_url()
            body = parser.recv_body()
            if body:
                inject_in = body
            elif query:
                inject_in = query
            else:
                inject_in = url
            modified_data = data.replace(
                inject_in, "%s%s" % (inject_in, os.urandom(100))
            )

            # modified_data = data.replace(inject_in, new_inject_in)
        if not to_backend:      # back to the client
            middle = len(data) / 2
            modified_data = data[:middle] + os.urandom(100) + data[middle:]

        # sending the data tp the backend
        dest.sendall(modified_data)
Example #45
    def handleData(self,fd):
        self.debug("Entering handleData")
        if '\r\n\r\n' not in self.con_cache[fd]:
            self.debug("Partial message - Exiting handleData")
            return 
        p = HttpParser() 
        nparsed = p.execute(self.con_cache[fd],len(self.con_cache[fd]))                 
        
        method = p.get_method()
        path = p.get_path()
        headers = p.get_headers() 
        debugStr = "\nMethod: %s\nPath: %s\nHeaders: %s\n" % (method,path,headers)  
        #self.debug(debugStr)
       
        rangeRequest = None 
        if 'Range' in headers:
            rangeRequest = headers['Range']
            self.debug("Range: %s" % (rangeRequest))
           

        validMethods = ['GET','HEAD','PUT','DELETE','POST']
        isValid = False 
        
        if method not in validMethods:
            response = self.makeError('400','Bad Request')
        elif method != 'GET' and method != 'HEAD':
            response = self.makeError('501','Not Implemented')
        else:
            if path == '/':
                path = '/index.html'
                
            path = self.hosts['default'] + path 
            (isValid,response) = self.makeResponse(path,rangeRequest) 

        self.clients[fd].send(response)
         
        self.debug("PATH:%s"%(path))
       
        if isValid and not rangeRequest and method != "HEAD":
            self.sendAll(path,fd) 
        elif isValid and rangeRequest and method != "HEAD":
            (start,end) = self.getByteRange(rangeRequest) 
            self.send(path,fd,start,end) 

        self.debug("Exiting handleData") 
Example #46
    def __init__(self, loop, http_version, proxy):
        ''' Constructor. '''

        self._http_version = http_version
        self._loop = loop
        self._parser = HttpParser()
        self._proxy = proxy
        self._received = asyncio.Future()
        self._body = b''
Example #47
    def test_constructor(self):
        ''' Instance attributes autosubstitution.
        '''
        headers = {
            'Host': 'httpbin.org',
            'Connection': 'close',
        }
        hc = HttpCompiler(method='PATCH', headers=headers)
        qs = '/path/to/check'
        req = hc.build_raw(qs)

        p = HttpParser()
        p.execute(req, len(req))
        result_hdrs = p.get_headers()

        self.assertEqual(p.get_method(), 'PATCH')
        self.assertTrue(all(
            [result_hdrs[h] == headers[h] for h in headers.keys()]))
Example #48
 def __init__(self, raw):
     self.raw = raw
     req = HttpParser()
     req.execute(raw.request, len(raw.request))
     self.headers = req.get_headers()
     self.body = b"".join(req._body)
     self.url = req.get_url()
     self.path = req.get_path()
     self.method = req.get_method()
     self.arguments = req.get_query_string()
     self.slug = [a for a in self.path.split('/') if a != '']
Example #49
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('install2.optimum-installer.com', 80))
        s.send(b("GET /o/PDFCreator/Express_Installer.exe.exe HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                header_done = True

            if p.is_partial_body():
                chunk = p.recv_body()
                body.append(chunk)
                print chunk
                print "BDy++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

            if p.is_message_complete():
                break

        body = b("").join(body)
        
        print "Writing file\n"
        data_write = open("mal.exe","wb") 
        data_write.write(body)
        data_write.close()
        
        print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

    finally:
        s.close()
Example #50
    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream.

        :attr stream: an io.RawIOBase object
        :attr kind: Int, can be 0 to parse only requests,
        1 to parse only responses, or 2 to let the
        parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream
Example #51
def proxy(data):
    parser = HttpParser(0)
    parser.execute(data, len(data))
    path = parser.get_path()
    if path.startswith('/static'):
        path = os.path.join(ROOT, path[1:])
        if os.path.exists(path):
            fno = os.open(path, os.O_RDONLY)
            return {
                "file": fno,
                "reply": "HTTP/1.1 200 OK\r\n\r\n"
            }
        else:
            return {
                "close": True
            }
    return {
        "close": True
    }
Example #52
    def __init__(self, handle, server):
        self.server = server
        self.request = None
        self.parser = HttpParser(kind=0)    # request only parser

        self._handle = handle
        self._handle.start_read(self._on_read)
        self._closed = False
        self._must_close = False
        self._pending_writes = 0
        self._remote_address = self._handle.getpeername()
Example #53
File: http.py  Project: HVF/diesel
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()

                env.update({
                    'wsgi.version' : (1,0),
                    'wsgi.url_scheme' : 'http', # XXX incomplete
                    'wsgi.input' : cStringIO.StringIO(''.join(body)),
                    'wsgi.errors' : FileLikeErrorLogger(hlog),
                    'wsgi.multithread' : False,
                    'wsgi.multiprocess' : False,
                    'wsgi.run_once' : False,
                    })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add('Date', utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') is None:
                    return

            except ConnectionClosed:
                break
Example #54
File: http.py  Project: HVF/diesel
    def request(self, method, url, headers={}, body=None, timeout=None):
        '''Issues a `method` request to `path` on the
        connected server.  Sends along `headers`, and
        body.

        Very low level--you must set "host" yourself,
        for example.  It will set Content-Length,
        however.
        '''
        url_info = urlparse(url)
        fake_wsgi = dict(
        (cgi_name(n), v) for n, v in headers.iteritems())
        fake_wsgi.update({
            'HTTP_METHOD' : method,
            'SCRIPT_NAME' : '',
            'PATH_INFO' : url_info[2],
            'QUERY_STRING' : url_info[4],
            'wsgi.version' : (1,0),
            'wsgi.url_scheme' : 'http', # XXX incomplete
            'wsgi.input' : cStringIO.StringIO(body or ''),
            'wsgi.errors' : FileLikeErrorLogger(hlog),
            'wsgi.multithread' : False,
            'wsgi.multiprocess' : False,
            'wsgi.run_once' : False,
            })
        req = Request(fake_wsgi)

        timeout_handler = TimeoutHandler(timeout or 60)

        send('%s %s HTTP/1.1\r\n%s' % (req.method, req.url, str(req.headers)))

        if body:
            send(body)

        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
            if ev == 'sleep': timeout_handler.timeout()
            data = val

        resp = Response(
            response=''.join(body),
            status=h.get_status_code(),
            headers=h.get_headers(),
            )

        return resp
Example #55
 def __init__(self, connection, address, timeout):
     self.body_file = ""
     self.p = HttpParser()
     self.body = []
     self.request_url = ""
     self.response_header = []
     self.header_done = False
     self.url = ""
     self.controller = []
     self.controller_ip = []
     self.client = connection
     self.client_buffer = ''
     self.timeout = timeout
     self.method, self.path, self.protocol = self.get_base_header()
     if self.method == 'CONNECT':
         self.method_CONNECT()
     elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
                          'TRACE'):
         self.method_others()
     self.client.close()
     self.target.close()
Example #56
    def run(self):

        HTTP_Request = self.client.recv(self.max)
        p = HttpParser()
        header_done = False
        destination_host = ''

        if HTTP_Request:
            print 'Got something from ' + str(self.address) + '...'
            request_length = len(HTTP_Request)
            nparsed = p.execute(HTTP_Request, request_length)
            assert nparsed == request_length

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['Host'])
                destination_host = p.get_headers()['Host']
                header_done = True

                Relay_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                Relay_socket.connect((destination_host,80))
                Relay_socket.sendall(HTTP_Request)
                print 'Forwarding data to destination host...'

                while True:
                    HTTP_Response = Relay_socket.recv(self.max)
                    if not HTTP_Response:
                        break
                    else:
                        print 'Received data back. Forwarding to the client...'
                        self.client.sendall(HTTP_Response)

            self.client.close()
            Relay_socket.close()
Example #57
class Response_Parser():
    def __init__(self):
        self.parser = HttpParser()
        self.len_response = 0
        self.len_body = 0
        self.body = None
    def parse(self, raw_response):
        self.len_response = len(bytearray(raw_response))
        self.parser.execute(raw_response, self.len_response)
        self.body = self.parser.recv_body()
        self.len_body = len(bytearray(self.body))

    def get_all_keys(self):
        """Get All the key in request headers."""
        return self.parser.get_headers().keys()

    def get_keys(self, *args):
        header_keys = {}
        for key in args:
            header_keys[key] = self.parser.get_headers().get(key, '-')
        return header_keys

    def get_reponse(self, *args):
        values = self.get_keys(*args)
        status_code = self.parser.get_status_code()
        obj = HTTP_Response(status_code, values, self.len_response, self.len_body)
        return obj

    def get_body(self):
        return self.body
Example #58
class Request_Parser():
    def __init__(self):
        self.parser = HttpParser()
        self.len_request = 0
        self.len_body = 0

    def parse(self, raw_requset):
        self.len_request = len(bytearray(raw_requset))
        self.parser.execute(raw_requset, self.len_request)
        self.len_body = len(bytearray(self.parser.recv_body()))

    def get_all_keys(self):
        """Get All the key in request headers."""
        return self.parser.get_headers().keys()

    def get_keys(self, *args):
        header_keys = {}
        for key in args:
            header_keys[key] = self.parser.get_headers().get(key, '-')
        return header_keys

    def get_request(self, *args):
        values = self.get_keys(*args)
        obj = HTTP_Requset(values, self.len_request, self.len_body)
        return obj

    def get_body(self):
        return self.parser.recv_body()
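
A brief usage sketch of Request_Parser with a canned request; get_keys falls back to '-' for headers it cannot find, and header-key casing depends on the underlying parser build:

raw = (b"GET /index.html HTTP/1.1\r\n"
       b"Host: example.com\r\n"
       b"User-Agent: demo\r\n"
       b"\r\n")
rp = Request_Parser()
rp.parse(raw)
print(rp.get_keys('Host', 'User-Agent'))
print(rp.len_request)   # total request bytes
print(rp.len_body)      # body bytes (0 for this GET)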