Example #1
    def _handle(self,
                source,
                dest,
                to_backend,
                on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub(
                        '\r\nHost: %s\r\n' % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket')
                    or not self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #2
    def run(self):

        self.client.settimeout(0.8)
        HTTP_Request = ""
        p = HttpParser()
        header_done = False
        destination_host = ''
        global isAvailable

        while True:
            try:
                Requestline = self.client.recv(self.max)
                if Requestline != "":
                    HTTP_Request += Requestline
                else:
                    break
            except (socket.timeout, socket.error):
                #message = 'status: request timeout or socket error'
                #msglen = msg_len(message)
                #os.write(self.child_w, msglen + message)
                #print "request timeout OR socket error"
                break
            except IOError:
                print "ioerror"
                break
Example #3
def proxy(data):
    recved = len(data)

    idx = data.find("\r\n")
    if idx <= 0:
        return

    line, rest = data[:idx], data[idx:]
    if line.startswith("CONNECT"):
        parts = line.split(None)
        netloc = parts[1]
        remote = parse_address(netloc, 80)

        reply_msg = "%s 200 OK\r\n\r\n" % parts[2]
        return {"remote": remote, "reply": reply_msg, "data": ""}

    parser = HttpParser()
    parsed = parser.execute(data, recved)
    if parsed != recved:
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n\r\nError parsing request'
        }

    if not parser.get_url():
        return

    parsed_url = urlparse.urlparse(parser.get_url())

    is_ssl = parsed_url.scheme == "https"
    remote = parse_address(parsed_url.netloc, 80)

    return {"remote": remote, "ssl": is_ssl}
Example #4
    def run(self):

        HTTP_Request = self.client.recv(self.max)
        p = HttpParser()
        header_done = False
        destination_host = ''

        if HTTP_Request:
            print 'Got something from ' + str(self.address) + '...'
            request_length = len(HTTP_Request)
            nparsed = p.execute(HTTP_Request, request_length)
            assert nparsed == request_length

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['Host'])
                destination_host = p.get_headers()['Host']
                header_done = True

                Relay_socket = socket.socket(socket.AF_INET,
                                             socket.SOCK_STREAM)
                Relay_socket.connect((destination_host, 80))
                Relay_socket.sendall(HTTP_Request)
                print 'Forwarding data to destination host...'

                while True:
                    HTTP_Response = Relay_socket.recv(self.max)
                    if not HTTP_Response:
                        break
                    else:
                        print 'Received data back. Forwarding to the client...'
                        self.client.sendall(HTTP_Response)

            self.client.close()
            if header_done:
                Relay_socket.close()
Example #5
    def saveTCP(self, index, path):
        if os.name == 'nt':
            path = path.replace('file://', '')[1:]
        else:
            path = path.replace('file://', '')

        if (index + 1) in network_sniffer.getTcpBodies():
            f = open(path, 'wb')

            try:
                p = HttpParser()
                recved = len(network_sniffer.getTcpBodies()[index + 1]['data'])
                nparsed = p.execute(
                    network_sniffer.getTcpBodies()[index + 1]['data'], recved)
                assert nparsed == recved
                f.write(p.recv_body())

                ret = 'HTTP message parsed; HTTP payload saved.'
            except AssertionError:
                f.write(network_sniffer.getTcpBodies()[index + 1]['data'])

                ret = 'No HTTP message parsed; raw TCP data saved.'

            f.close()

            return ret

        else:
            return 'The packet is not the last segment of a TCP-segmented stream.'
Example #6
    def iter_items(self, partition):
        """ Yields objects in the source's native format """

        warc_stream = self.open_warc_stream(partition["path"])

        for record in warc_stream:

            if not record.url:
                continue

            if record['Content-Type'] != 'application/http; msgtype=response':
                continue

            url = URL(record.url, check_encoding=True)

            do_parse, index_level = self.qualify_url(url)

            if not do_parse:
                continue

            payload = record.payload.read()
            parser = HttpParser()
            parser.execute(payload, len(payload))

            headers = parser.get_headers()

            if 'text/html' not in headers.get("content-type", ""):
                # print "Not HTML?", record.url, headers
                continue

            yield url, headers, "html", index_level, parser.recv_body()
Example #7
def parse_request(http_request, protocol, host, port):
    """
    Parse an HTTP request from Burp Suite into a dict
    TODO cookie parse
    """
    httpParser = HttpParser()
    httpParser.execute(http_request, len(http_request))

    header = dict(httpParser.get_headers())
    header.pop("Content-Length")  # remove Content-Length
    # cookie = header["Cookie"]
    body = httpParser.recv_body()
    method = httpParser.get_method()
    url = protocol + "://" + host + httpParser.get_path()
    query = httpParser.get_query_string()

    params = dict(urlparse.parse_qsl(query))
    data = dict(urlparse.parse_qsl(body)) if method == "POST" else {}
    try:
        jsondata = json.loads(
            body) if method == "POST" and header["Content-Type"] == "application/json" else {}
    except Exception as e:
        print "[!] " + e
        jsondata = {}
    return method, url, header, params, data, jsondata
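A hedged usage sketch for parse_request: the raw request text, host, and port below are made up for illustration, and the call simply mirrors how a request captured in Burp Suite might be replayed through this helper:

raw_request = (
    "POST /login HTTP/1.1\r\n"
    "Host: example.com\r\n"
    "Content-Type: application/x-www-form-urlencoded\r\n"
    "Content-Length: 27\r\n"
    "\r\n"
    "user=alice&password=secret1"
)

# Hypothetical invocation; protocol, host and port are placeholders.
method, url, header, params, data, jsondata = parse_request(
    raw_request, "https", "example.com", 443)
# method == "POST", url == "https://example.com/login",
# data == {"user": "alice", "password": "secret1"}, jsondata == {}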
Example #8
    def __init__(self, *args, **kwargs):
        super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs)

        self.parser = HttpParser()
        self.is_http = None
        self.body = []
        self.downstream_buffer = ''
Example #9
    def inject(self, dest, to_backend, data, http=False):
        modified_data = data
        if http:
            # to_backend = not to_backend
            parser = HttpParser()
            parser.execute(data, len(data))

            query = parser.get_query_string()
            url = parser.get_url()
            body = parser.recv_body()
            if body:
                inject_in = body
            elif query:
                inject_in = query
            else:
                inject_in = url
            modified_data = data.replace(
                inject_in, "%s%s" % (inject_in, os.urandom(100))
            )

            # modified_data = data.replace(inject_in, new_inject_in)
        if not to_backend:      # back to the client
            middle = len(data) / 2
            modified_data = data[:middle] + os.urandom(100) + data[middle:]

        # sending the data to the backend
        dest.sendall(modified_data)
Example #10
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('gunicorn.org', 80))
        s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                print(p.get_method())
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print(b("").join(body))

    finally:
        s.close()
Example #11
    def run(self):
        while self.running:
            data, addr = self.listener.recvfrom(4096)
            http_parser = HttpParser()
            http_parser.execute(data, len(data))
            headers = http_parser.get_headers()

            try:
                if headers['NTS'] == 'ssdp:alive' and headers[
                        'NT'] == 'urn:zenterio-net:service:X-CTC_RemotePairing:1':
                    stb = STB(uuid=headers['USN'][5:41],
                              location=headers['LOCATION'],
                              nt=headers['NT'])
                    self.mutex.acquire(1)
                    for x in self.stbs:
                        if x.uuid == stb.uuid:
                            break
                    else:
                        self.stbs.append(stb)
                        log.info('-------------------------------------------')
                        log.info("New STB detected!")
                        log.info("UUID: " + stb.uuid)
                        log.info("Location: " + stb.location)
                        log.info("NT: " + stb.nt)
                    self.mutex.release()
            except:
                pass
Example #12
    def start(self):
        signal.signal(signal.SIGTERM, SIG_DFL)
        print(f"Worker booted with pid: {os.getpid()}")
        while True:
            body = []
            conn, addr = self.socket.accept()
            http_parser = HttpParser()
            with conn:
                while True:
                    data = conn.recv(1024)
                    if not data:
                        break
                    recved = len(data)
                    nparsed = http_parser.execute(data, recved)
                    assert nparsed == recved

                    if http_parser.is_headers_complete():
                        print(http_parser.get_headers())

                    if http_parser.is_partial_body():
                        body.append(http_parser.recv_body())

                    if http_parser.is_message_complete():
                        break

                buffered_body = io.StringIO(b"".join(body).decode())  # recv_body() yields bytes; decode before wrapping in StringIO
                koi(self.app,
                    conn,
                    request_method=http_parser.get_method(),
                    headers=http_parser.get_headers(),
                    body=buffered_body,
                    content_length=http_parser.get_headers().get(
                        'content-length', 0))
Example #13
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if 'HTTP_CONTENT_LENGTH' in env:
                    env['CONTENT_LENGTH'] = env.pop("HTTP_CONTENT_LENGTH")
                if 'HTTP_CONTENT_TYPE' in env:
                    env['CONTENT_TYPE'] = env.pop("HTTP_CONTENT_TYPE")

                env.update({
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',  # XXX incomplete
                    'wsgi.input': cStringIO.StringIO(''.join(body)),
                    'wsgi.errors': FileLikeErrorLogger(hlog),
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False,
                    'REMOTE_ADDR': addr[0],
                    'SERVER_NAME': HOSTNAME,
                    'SERVER_PORT': str(self.port),
                })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add(
                        'Date',
                        utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') == None:
                    return

            except ConnectionClosed:
                break
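The environ fix-up above (moving HTTP_CONTENT_LENGTH and HTTP_CONTENT_TYPE to their bare CGI names) is easier to see outside diesel. A minimal sketch with a hand-written request; the request bytes are illustrative:

from http_parser.parser import HttpParser

request = (b"POST /submit HTTP/1.1\r\n"
           b"Host: localhost\r\n"
           b"Content-Type: text/plain\r\n"
           b"Content-Length: 5\r\n"
           b"\r\n"
           b"hello")

h = HttpParser()
h.execute(request, len(request))

env = h.get_wsgi_environ()
# The parser exposes these as HTTP_*-prefixed keys; WSGI expects the bare CGI names.
if 'HTTP_CONTENT_LENGTH' in env:
    env['CONTENT_LENGTH'] = env.pop('HTTP_CONTENT_LENGTH')
if 'HTTP_CONTENT_TYPE' in env:
    env['CONTENT_TYPE'] = env.pop('HTTP_CONTENT_TYPE')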
Example #14
    def recv_http_response(self, conn):
        response = HttpParser(kind=1)
        status_code = None
        headers = None

        try:
            while True:
                chunk = conn.recv(1024)

                response.execute(chunk, len(chunk))
                if response.is_headers_complete():
                    headers = response.get_headers()
                    status_code = response.get_status_code()

                    content_length = headers.get('content-length')
                    if not content_length or int(content_length) == 0:
                        break

                if response.is_message_complete():
                    break

                if not chunk:
                    raise EOFError('Incomplete Message')

        except Exception as e:
            raise GeneralProxyError(
                'HTTP Proxy communication error ({})'.format(e))

        return status_code, headers
Example #15
def make_request(sock, server_name):
    """
    Given an open socket, makes a simple HTTP request, parses the response, and
    returns a dictionary containing the HTTP headers that were returned by the
    server.
    """
    p = HttpParser()

    request = ('GET / HTTP/1.0\r\n' +
               'User-Agent: pySSLScan\r\n' +
               'Host: %s\r\n\r\n' % (server_name,))
    sock.write(request.encode('ascii'))

    headers = None
    while True:
        data = sock.recv(1024)
        if not data:
            break

        recved = len(data)
        nparsed = p.execute(data, recved)
        assert nparsed == recved

        if p.is_headers_complete():
            headers = p.get_headers()
            break

    return headers
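make_request writes with sock.write(), so it needs a socket-like object exposing write() and recv(); an ssl-wrapped socket does. A possible way to drive it, with example.com and port 443 as placeholder values (pySSLScan itself hands in an already-negotiated connection):

import socket
import ssl

server_name = "example.com"                     # placeholder target

raw = socket.create_connection((server_name, 443))
ctx = ssl.create_default_context()
sock = ctx.wrap_socket(raw, server_hostname=server_name)
try:
    headers = make_request(sock, server_name)   # dict of response headers, or None
    print(headers)
finally:
    sock.close()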
Example #16
    def send_request(self, nocallback=False):
        self._connected = True
        req = (
            'GET %s HTTP/1.1',
            'Host: %s',
            # t.co will return 200 and use js/meta to redirect using the following :-(
            # 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0',
            'User-Agent: FetchTitle/1.0',
            'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.7',
            'Accept-Language: zh-cn,zh;q=0.7,en;q=0.3',
            'Accept-Charset: utf-8,gb18030;q=0.7,*;q=0.7',
            'Accept-Encoding: gzip, deflate',
            'Connection: keep-alive',
        )
        path = self.url.path or '/'
        if self.url.query:
            path += '?' + self.url.query
        req = '\r\n'.join(req) % (
            path,
            self.host,
        )
        if self._cookie:
            req += '\r\n' + self._cookie
        req += '\r\n\r\n'
        self.stream.write(req.encode())
        self.headers_done = False
        self.parser = HttpParser(decompress=True)
        if not nocallback:
            self.stream.read_until_close(
                # self.addr will have been changed when close callback is run
                partial(self.on_data, close=True, addr=self.addr),
                streaming_callback=self.on_data,
            )
Example #17
    def __init__(self):
        # Loading the protocol certificates.
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_context.load_cert_chain("ssl/server.crt", "ssl/server.key")

        # Initiates the HttpParser object.
        self.http_parser = HttpParser()

        # Creates the TLS flag.
        self.using_tls = False

        # Initiating our HTTP transport with the emulated client.
        self.HTTP_Protocol = HTTP(using_ssl=False)

        # Setting our SSL context for the server.
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_context.load_cert_chain("ssl/server.crt", "ssl/server.key")

        # Opening our HTTPS transport.
        self.HTTPS_Protocol = asyncio.sslproto.SSLProtocol(
            loop=asyncio.get_running_loop(),
            app_protocol=HTTP(using_ssl=True),
            sslcontext=ssl_context,
            waiter=None,
            server_side=True,
        )
Example #18
def findhue():
    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:239.255.255.250:1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'

    # Set up UDP socket
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(5)
    s.sendto(msg.encode('utf-8'), ('239.255.255.250', 1900) )

    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr,headers

            if p.is_message_complete():
                break
    except timeout:
        pass
    return None
Example #19
def iter_warc_records(warc_file, domain_whitelist=None, only_homepages=None):
    """ Selective iterator over records in a WARC file """

    for _, record in enumerate(warc_file):

        if not record.url:
            continue

        if record['Content-Type'] != 'application/http; msgtype=response':
            continue

        url = URL(record.url, check_encoding=True)

        if domain_whitelist is not None:
            if url.domain not in domain_whitelist:
                continue

        elif only_homepages:
            if url.parsed.path != "/" or url.parsed.query != "":
                continue

        payload = record.payload.read()
        parser = HttpParser()
        parser.execute(payload, len(payload))

        headers = parser.get_headers()

        if 'text/html' not in headers.get("content-type", ""):
            # print "Not HTML?", record.url, headers
            continue

        yield url, headers, parser.recv_body()
Example #20
    def __init__(self, current_key_hex, partner_key_hex):
        # self.body = []
        self.parser = HttpParser(kind=2, decompress=True)
        self.data_bytes = 0
        self.total_bytes = 0
        self.current_key_hex = current_key_hex
        self.partner_key_hex = partner_key_hex
        self.is_request = None
        self.service = None
Example #21
    def receive_buffer(self, buf):
        self.buffer += buf
        parser = HttpParser()
        recved = len(self.buffer)
        nparsed = parser.execute(self.buffer, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            return (True, parser)
        return (False, parser)
Example #22
    def handler(self):
        httpParser1 = HttpParser()
        httpParser2 = HttpParser()
        serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        serverSocket.bind(('', 5000))
        serverSocket.listen(1)
        conn, addr = serverSocket.accept()
        cumulatedPacketLength = 0
        while 1:
            data = conn.recv(1024)
            receivedPacketLength = len(data)
            httpParser1.execute(data, receivedPacketLength)
            print httpParser1.get_method()
            cumulatedPacketLength += receivedPacketLength
            if cumulatedPacketLength > 235:
                self.isReceivedRequestMatchExpectation = True
                response_body_raw = '{"success":true,"data":"FILE_FOUND_AND_LOADED"}'
                conn.send('HTTP/1.1 200 OK\r\n'
                          'Content-Type: application/json; charset=utf-8\r\n'
                          'Content-Length: %s\r\n'
                          '\r\n%s' % (len(response_body_raw), response_body_raw))
                conn.close()
            break

        serverSocket.listen(1)
        conn, addr = serverSocket.accept()
        cumulatedPacketLength = 0
        while 1:
            data = conn.recv(1024)
            receivedPacketLength = len(data)
            httpParser2.execute(data, receivedPacketLength)
            print httpParser2.get_method()
            cumulatedPacketLength += receivedPacketLength
            if cumulatedPacketLength > 235:
                self.isReceivedRequestMatchExpectation = True
                response_body_raw = '{"success":true,"data":"FILE_FOUND_AND_LOADED"}'
                conn.send('HTTP/1.1 200 OK\r\n'
                          'Content-Type: application/json; charset=utf-8\r\n'
                          'Content-Length: %s\r\n'
                          '\r\n%s' % (len(response_body_raw), response_body_raw))
                conn.close()
                serverSocket.close()
            break
Example #23
def handle(connection, address, pid, queue_obj):
  import logging
  import json
  from queue import Full

  logging.basicConfig(level=logging.DEBUG)
  logger = logging.getLogger("process-%r" % (address,))
  content = []
  parser = HttpParser()

  try:
    logger.debug("Connected %r at %r", connection, address)
    while True:
      resp = connection.recv(psize)
      recved = len(resp)

      parsed = parser.execute(resp, recved)
      assert parsed == recved

      if parser.is_headers_complete():
        parser.get_headers()

      if parser.is_partial_body():
        content.append(parser.recv_body())

      if parser.is_message_complete():
        break
  except:
    logger.exception("Problem handling request: %s", sys.exc_info()[1])
    send_and_close(connection, 500)
    return

  parsed_json = {}
  data = None

  try:
    parsed_json = json.loads("".join(map(lambda s: s.decode("utf-8"), content)))
    data = parsed_json.get('data')
    url = parsed_json.get('callback')
    key = parsed_json.get('private_key')
  except:
    logger.exception("Problem decoding JSON: %s", sys.exc_info()[1])
  finally:
    if data is None:
      send_and_close(connection, 400, {"message": "JSON Parse Error"})
    elif data == 'ping':
      send_and_close(connection, 200, {"started": started, "queue": queue_obj.qsize()})
    elif data == 'stop':
      send_and_close(connection, 200, {"message": "Shutting down"})
      os.kill(pid, signal.SIGUSR1)
    elif 'trackers' in data and 'hashes' in data:
      try:
        queue_obj.put({"data": [data, url, key], "address": address}, False)
        send_and_close(connection, 200, {"message": ("in queue [%r]" % (address,))})
      except Full:
        send_and_close(connection, 429, {"message": "Server queue is full. Try another one."})
Example #24
File: http.py  Project: bjornua/dna
    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream. 

        :attr stream: an io.RawIOBase object
        :attr kind: Int,  could be 0 to parseonly requests, 
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream
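The kind values described in the docstring map directly onto HttpParser itself. A small sketch of the three modes, using hand-written messages rather than anything from this project:

from http_parser.parser import HttpParser

request = b"GET /ping HTTP/1.1\r\nHost: example.org\r\n\r\n"
response = b"HTTP/1.1 204 No Content\r\n\r\n"

req_parser = HttpParser(kind=0)        # parse only requests
req_parser.execute(request, len(request))
print("%s %s" % (req_parser.get_method(), req_parser.get_path()))   # GET /ping

resp_parser = HttpParser(kind=1)       # parse only responses
resp_parser.execute(response, len(response))
print(resp_parser.get_status_code())                                # 204

auto_parser = HttpParser(kind=2)       # let the parser detect the type
auto_parser.execute(request, len(request))
print(auto_parser.get_method())                                     # GET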
Example #25
def findhue():  #Auto-find bridges on network & get list
    r = requests.get("https://discovery.meethue.com/")
    bridgelist = json.loads(r.text)
    i = 0
    for b in bridgelist:
        i += 1

    if commandlineargs.bridgeid is not None:
        found = False
        for idx, b in enumerate(bridgelist):
            if b["id"] == commandlineargs.bridgeid:
                bridge = idx
                found = True
                break
        if not found:
            sys.exit("bridge {} was not found".format(
                commandlineargs.bridgeid))
    elif len(bridgelist) > 1:
        print("Multiple bridges found. Select one of the bridges below (",
              list(bridgelist), ")")
        bridge = int(input())
    else:
        bridge = 0  #Default to the only bridge if only one is found

    hueip = bridgelist[bridge][
        'internalipaddress']  #Logic currently assumes 1 bridge on the network
    print("I will use the bridge at ", hueip)

    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:' + hueip +':1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(12)
    s.sendto(msg.encode('utf-8'), (hueip, 1900))
    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr, headers
            if p.is_message_complete():
                break
    except timeout:
        verbose('Timed out, better luck next time')
        pass
    return None
Example #26
    def __init__(self, handle, server):
        self.server = server
        self.request = None
        self.parser = HttpParser(kind=0)  # request only parser

        self._handle = handle
        self._handle.start_read(self._on_read)
        self._closed = False
        self._must_close = False
        self._pending_writes = 0
        self._remote_address = self._handle.getpeername()
Example #27
    def __init__(self, data):
        self.parser = HttpParser()
        self.parser.execute(data, len(data))

        self.method = self.parser.get_method()
        self.path = self.parser.get_path()
        self.headers = self.parser.get_headers()
        self.querystring = parse_qs(unquote(self.parser.get_query_string()),
                                    keep_blank_values=True)
        if self.querystring:
            self.path += "?{}".format(self.parser.get_query_string())
Example #28
	def parse_request(self, message):
		try:
		    from http_parser.parser import HttpParser
		except ImportError:
		    from http_parser.pyparser import HttpParser

		p = HttpParser()
		nparsed = p.execute(message,len(message))
		
		self.method = p.get_method()
		self.path = p.get_path()
		self.headers = p.get_headers()

		if p.get_method() == 'GET':
			self.status = 200

		#if "Range" in p.get_headers():
		#	strings = self.headers["Range"]
		#	print strings

		elif p.get_method() != 'GET':
			self.status = 501		#if the method is not a GET
			#TODO maybe make this a head request eventually if you do the download accelerator

		if not p.get_method():
			self.status = 400

		if p.get_path() == '/':
			self.path = '/index.html'

		elif p.get_path().endswith('/'):
			self.path += 'index.html'
		
		if p.get_path() is None:
			self.status = 501


		#print self.path
		"""
		print '\nMethod: ' 
		print p.get_method() 
		print '\nPath: ' 
		print p.get_path()
		print '\nHeaders: ' 
		print p.get_headers()
		print '\nVersion: '
		version = p.get_version()
		print version
		"""
		#print '\nRESPONSE CODE: ' + str(self.status) + '\n'
		#print self.path
		#print self.status
		#working so far
Example #29
    def __init__(self, *args, **kwargs):
        super(PupyHTTPWrapperServer, self).__init__(*args, **kwargs)

        self.parser = HttpParser()
        self.is_http = None
        self.body = []
        self.downstream_buffer = Buffer()

        self.well_known = ('GET', 'POST', 'OPTIONS', 'HEAD', 'PUT', 'DELETE')
        self.omit = tuple('{} {}'.format(x, y) for x in self.well_known
                          for y in (self.path, '/wsapp '))
        self.probe_len = max(len(x) for x in self.omit)
Example #30
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as we
    receive more incoming data, until we return something other than None.

    typically our response tells tproxy where to proxy the connection to, but
    may also tell it to hang up, or respond with some error message.
    """

    log = logging.getLogger("proxy")

    bytes_received = len(data)

    parser = HttpParser()
    bytes_parsed = parser.execute(data, bytes_received)

    if bytes_parsed != bytes_received:
        return {'close': 'HTTP/1.0 400 Bad Request\r\n\r\nParse error'}

    if not parser.is_headers_complete():
        if bytes_received > MAX_HEADER_LENGTH:
            return {
                'close': 'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large'
            }
        return None

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % (parser.get_url(), ))
        return _ROUTER.route(route_host, parser.get_method(),
                             parser.get_path(), parser.get_query_string())
    except Exception, err:
        log.error("error routing %r, %s" % (
            parser.get_url(),
            traceback.format_exc(),
        ))
        gevent.sleep(ERROR_DELAY)
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request'
        }
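The "return None until we can decide" contract in the docstring falls straight out of the incremental parser: is_headers_complete() only turns true once the blank line ending the headers has been fed in, which may take several reads. A standalone sketch of that behaviour with a fabricated request split across two chunks:

from http_parser.parser import HttpParser

chunks = [
    b"GET /app HTTP/1.1\r\nHost: back",                 # first read: headers incomplete
    b"end.internal\r\nConnection: keep-alive\r\n\r\n",  # second read completes them
]

parser = HttpParser()
for chunk in chunks:
    parsed = parser.execute(chunk, len(chunk))
    assert parsed == len(chunk)
    if not parser.is_headers_complete():
        continue    # proxy() would return None here; tproxy calls back with more data
    print("%s %s %s" % (parser.get_method(), parser.get_path(),
                        parser.get_headers().get('Host')))
    break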