Example #1
    def iter_items(self, partition):
        """ Yields objects in the source's native format """

        warc_stream = self.open_warc_stream(partition["path"])

        for record in warc_stream:

            if not record.url:
                continue

            if record['Content-Type'] != 'application/http; msgtype=response':
                continue

            url = URL(record.url, check_encoding=True)

            do_parse, index_level = self.qualify_url(url)

            if not do_parse:
                continue

            payload = record.payload.read()
            parser = HttpParser()
            parser.execute(payload, len(payload))

            headers = parser.get_headers()

            if 'text/html' not in headers.get("content-type", ""):
                # print "Not HTML?", record.url, headers
                continue

            yield url, headers, "html", index_level, parser.recv_body()
Example #2
    def run(self):
        while self.running:
            data, addr = self.listener.recvfrom(4096)
            http_parser = HttpParser()
            http_parser.execute(data, len(data))
            headers = http_parser.get_headers()

            try:
                if headers['NTS'] == 'ssdp:alive' and headers[
                        'NT'] == 'urn:zenterio-net:service:X-CTC_RemotePairing:1':
                    stb = STB(uuid=headers['USN'][5:41],
                              location=headers['LOCATION'],
                              nt=headers['NT'])
                    self.mutex.acquire(1)
                    for x in self.stbs:
                        if x.uuid == stb.uuid:
                            break
                    else:
                        self.stbs.append(stb)
                        log.info('-------------------------------------------')
                        log.info("New STB detected!")
                        log.info("UUID: " + stb.uuid)
                        log.info("Location: " + stb.location)
                        log.info("NT: " + stb.nt)
                    self.mutex.release()
            except:
                pass
Example #3
    def iter_items(self, partition):
        """ Yields objects in the source's native format """

        warc_stream = self.open_warc_stream(partition["path"])

        for record in warc_stream:

            if not record.url:
                continue

            if record['Content-Type'] != 'application/http; msgtype=response':
                continue

            url = URL(record.url, check_encoding=True)

            do_parse, index_level = self.qualify_url(url)

            if not do_parse:
                continue

            payload = record.payload.read()
            parser = HttpParser()
            parser.execute(payload, len(payload))

            headers = parser.get_headers()

            if 'text/html' not in headers.get("content-type", ""):
                # print "Not HTML?", record.url, headers
                continue

            yield url, headers, "html", index_level, parser.recv_body()
Example #4
def iter_warc_records(warc_file, domain_whitelist=None, only_homepages=None):
    """ Selective iterator over records in a WARC file """

    for _, record in enumerate(warc_file):

        if not record.url:
            continue

        if record['Content-Type'] != 'application/http; msgtype=response':
            continue

        url = URL(record.url, check_encoding=True)

        if domain_whitelist is not None:
            if url.domain not in domain_whitelist:
                continue

        elif only_homepages:
            if url.parsed.path != "/" or url.parsed.query != "":
                continue

        payload = record.payload.read()
        parser = HttpParser()
        parser.execute(payload, len(payload))

        headers = parser.get_headers()

        if 'text/html' not in headers.get("content-type", ""):
            # print "Not HTML?", record.url, headers
            continue

        yield url, headers, parser.recv_body()
Example #5
    def inject(self, dest, to_backend, data, http=False):
        modified_data = data
        if http:
            # to_backend = not to_backend
            parser = HttpParser()
            parser.execute(data, len(data))

            query = parser.get_query_string()
            url = parser.get_url()
            body = parser.recv_body()
            if body:
                inject_in = body
            elif query:
                inject_in = query
            else:
                inject_in = url
            modified_data = data.replace(
                inject_in, "%s%s" % (inject_in, os.urandom(100))
            )

            # modified_data = data.replace(inject_in, new_inject_in)
        if not to_backend:      # back to the client
            middle = len(data) / 2
            modified_data = data[:middle] + os.urandom(100) + data[middle:]

        # sending the data to the backend
        dest.sendall(modified_data)
Example #6
def parse_request(http_request, protocol, host, port):
    """
    Parse HTTP request from Burp Suite to dict
    TODO cookie parse
    """
    httpParser = HttpParser()
    httpParser.execute(http_request, len(http_request))

    header = dict(httpParser.get_headers())
    header.pop("Content-Length")  # remove Content-Length
    # cookie = header["Cookie"]
    body = httpParser.recv_body()
    method = httpParser.get_method()
    url = protocol + "://" + host + httpParser.get_path()
    query = httpParser.get_query_string()

    params = dict(urlparse.parse_qsl(query))
    data = dict(urlparse.parse_qsl(body)) if method == "POST" else {}
    try:
        jsondata = json.loads(
            body) if method == "POST" and header["Content-Type"] == "application/json" else {}
    except Exception as e:
        print "[!] " + e
        jsondata = {}
    return method, url, header, params, data, jsondata
Example #7
    def recv_http_response(self, conn):
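        # HttpParser(kind=1) restricts the parser to HTTP responses (kind=0: requests, kind=2: both, the default).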
        response = HttpParser(kind=1)
        status_code = None
        headers = None

        try:
            while True:
                chunk = conn.recv(1024)

                response.execute(chunk, len(chunk))
                if response.is_headers_complete():
                    headers = response.get_headers()
                    status_code = response.get_status_code()

                    content_length = headers.get('content-length')
                    if not content_length or int(content_length) == 0:
                        break

                if response.is_message_complete():
                    break

                if not chunk:
                    raise EOFError('Incomplete Message')

        except Exception as e:
            raise GeneralProxyError(
                'HTTP Proxy communication error ({})'.format(e))

        return status_code, headers
class Response_Parser():
    def __init__(self):
        self.parser = HttpParser()
        self.len_response = 0
        self.len_body = 0
        self.body = None
    def parse(self, raw_response):
        self.len_response = len(bytearray(raw_response))
        self.parser.execute(raw_response, self.len_response)
        self.body = self.parser.recv_body()
        self.len_body = len(bytearray(self.body))

    def get_all_keys(self):
        """Get All the key in request headers."""
        return self.parser.get_headers().keys()

    def get_keys(self, *args):
        header_keys = {}
        for key in args:
            header_keys[key] = self.parser.get_headers().get(key, '-')
        return header_keys

    def get_reponse(self, *args):
        values = self.get_keys(*args)
        status_code = self.parser.get_status_code()
        obj = HTTP_Response(status_code, values, self.len_response, self.len_body)
        return obj

    def get_body(self):
        return self.body
class Request_Parser():
    def __init__(self):
        self.parser = HttpParser()
        self.len_request = 0
        self.len_body = 0

    def parse(self, raw_request):
        self.len_request = len(bytearray(raw_request))
        self.parser.execute(raw_request, self.len_request)
        self.len_body = len(bytearray(self.parser.recv_body()))

    def get_all_keys(self):
        """Get All the key in request headers."""
        return self.parser.get_headers().keys()

    def get_keys(self, *args):
        header_keys = {}
        for key in args:
            header_keys[key] = self.parser.get_headers().get(key, '-')
        return header_keys

    def get_request(self, *args):
        values = self.get_keys(*args)
        obj = HTTP_Requset(values, self.len_request, self.len_body)
        return obj

    def get_body(self):
        return self.parser.recv_body()
Example #10
def iter_warc_records(warc_file, domain_whitelist=None, only_homepages=None):
    """ Selective iterator over records in a WARC file """

    for _, record in enumerate(warc_file):

        if not record.url:
            continue

        if record['Content-Type'] != 'application/http; msgtype=response':
            continue

        url = URL(record.url, check_encoding=True)

        if domain_whitelist is not None:
            if url.domain not in domain_whitelist:
                continue

        elif only_homepages:
            if url.parsed.path != "/" or url.parsed.query != "":
                continue

        payload = record.payload.read()
        parser = HttpParser()
        parser.execute(payload, len(payload))

        headers = parser.get_headers()

        if 'text/html' not in headers.get("content-type", ""):
            # print "Not HTML?", record.url, headers
            continue

        yield url, headers, parser.recv_body()
Example #11
class Request:
    parser = None
    _body = None

    def __init__(self, data):
        self.parser = HttpParser()
        self.parser.execute(data, len(data))

        self.method = self.parser.get_method()
        self.path = self.parser.get_path()
        self.headers = self.parser.get_headers()
        self.querystring = parse_qs(unquote(self.parser.get_query_string()),
                                    keep_blank_values=True)
        if self.querystring:
            self.path += "?{}".format(self.parser.get_query_string())

    def add_data(self, data):
        self.parser.execute(data, len(data))

    @property
    def body(self):
        if self._body is None:
            self._body = decode_from_bytes(self.parser.recv_body())
        return self._body

    def __str__(self):
        return "{} - {} - {}".format(self.method, self.path, self.headers)
Example #12
 def __init__(self, raw):
     resp = HttpParser()
     resp.execute(raw.response, len(raw.response))
     self.headers = resp.get_headers()
     self.body = "".join(resp._body)
     self.raw = raw
     self.code = resp.get_status_code()
     self._json = None
Example #13
 def __init__(self, raw):
     self.raw = raw
     req = HttpParser()
     req.execute(raw.request, len(raw.request))
     self.headers = req.get_headers()
     self.body = b"".join(req._body)
     self.url = req.get_url()
     self.path = req.get_path()
     self.method = req.get_method()
     self.arguments = req.get_query_string()
     self.slug = [a for a in self.path.split('/') if a != '']
Example #14
class EmulatedClient(object):
    """ Class for emulating the client to the server.

        Notes:
            To accomplish a proper man-in-the-middle attack with TLS capability,
            the man-in-the-middle must be the one sending the original request to
            the server. With the emulated client we are changing the typical structure:

                client <-> server

            To one that looks like so:

                client <-> mitm (server) <-> mitm (emulated client) <-> server

            Where we then reply back to the client with the response the emulated client
            retrieved from the server on behalf of the client.
    """
    def __init__(self, using_ssl):
        # Creates our HttpParser object.
        self.http_parser = HttpParser()

        # Sets flag to whether or not we are using SSL.
        self.using_ssl = using_ssl

    async def connect(self, data):
        # Parses the data coming in.
        self.http_parser.execute(data, len(data))

        host = self.http_parser.get_wsgi_environ()["HTTP_HOST"]
        uri = self.http_parser.get_wsgi_environ()["RAW_URI"]

        # Sets the proper URL client is trying to reach.
        if self.using_ssl:
            url = f"https://{host}:{uri}"
        else:
            url = uri

        # Retrieves the destination server data.
        async with aiohttp.ClientSession() as session:
            async with session.get(url, ssl=False) as response:
                status = response.status
                reason = response.reason
                headers = response.headers
                response = await response.read()

        # Re-creates the servers response.
        resp = f"HTTP/1.1 {status} {reason}\r\n".encode("latin-1")
        for header in headers:
            resp += f"{header}: {headers[header]}\r\n".encode("latin-1")
        resp += b"\r\n" + response

        # Returns the data.
        return resp
Example #15
def get_headers_data(data):
    """ """
    parser = HttpParser()
    parser.execute(data, len(data))
    url = parser.get_url()
    method = parser.get_method()
    if method == 'CONNECT':
        host, _, port = url.partition(":")
    else:
        url = urlparse(url)
        host, _, port = url.netloc.partition(":")
    port = port and port.isdigit() and int(port) or 80
    return (host, port), method, parser.get_version()
Example #16
    def _handle(self,
                source,
                dest,
                to_backend,
                on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub(
                        '\r\nHost: %s\r\n' % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket')
                    or not self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #17
    def request(self):
        request_buff = ""
        request_parser = HttpParser()
        while True:
            r_data = self.socket.recv(ProxyHandler.BUFF_LEN)
            request_buff += r_data
            r_size = len(r_data)
            request_parser.execute(r_data, r_size)
            if request_parser.is_message_complete():
                break

        host = request_parser.get_headers().get('Host')
        url, port = self._analyse_host_and_port(host)
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
Example #18
def post_report(sock):
    st = os.statvfs(haystack_path)
    available_size = st.f_bavail * st.f_frsize
    obj = {}
    obj["listenip"] = listenip
    obj["listenport"] = listenport
    obj["disk_available_size"] = available_size
    obj["master"] = master
    obj["groupid"] = groupid
    obj["last_fileno"] = haystack.haystack_last_fileno
    body = json.dumps(obj)
    sock.send("POST /report HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (track.ip, track.port))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
Example #19
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if 'HTTP_CONTENT_LENGTH' in env:
                    env['CONTENT_LENGTH'] = env.pop("HTTP_CONTENT_LENGTH")
                if 'HTTP_CONTENT_TYPE' in env:
                    env['CONTENT_TYPE'] = env.pop("HTTP_CONTENT_TYPE")

                env.update({
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',  # XXX incomplete
                    'wsgi.input': cStringIO.StringIO(''.join(body)),
                    'wsgi.errors': FileLikeErrorLogger(hlog),
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False,
                    'REMOTE_ADDR': addr[0],
                    'SERVER_NAME': HOSTNAME,
                    'SERVER_PORT': str(self.port),
                })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add(
                        'Date',
                        utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') == None:
                    return

            except ConnectionClosed:
                break
Example #20
    def start(self):
        signal.signal(signal.SIGTERM, SIG_DFL)
        print(f"Worker booted with pid: {os.getpid()}")
        while True:
            body = []
            conn, addr = self.socket.accept()
            http_parser = HttpParser()
            with conn:
                while True:
                    data = conn.recv(1024)
                    if not data:
                        break
                    recved = len(data)
                    nparsed = http_parser.execute(data, recved)
                    assert nparsed == recved

                    if http_parser.is_headers_complete():
                        print(http_parser.get_headers())

                    if http_parser.is_partial_body():
                        body.append(http_parser.recv_body())

                    if http_parser.is_message_complete():
                        break

                buffered_body = io.StringIO("".join(body))
                koi(self.app,
                    conn,
                    request_method=http_parser.get_method(),
                    headers=http_parser.get_headers(),
                    body=buffered_body,
                    content_length=http_parser.get_headers().get(
                        'content-length', 0))
Example #21
    def run(self):

        HTTP_Request = self.client.recv(self.max)
        p = HttpParser()
        header_done = False
        destination_host = ''

        if HTTP_Request:
            print 'Got something from ' + str(self.address) + '...'
            request_length = len(HTTP_Request)
            nparsed = p.execute(HTTP_Request, request_length)
            assert nparsed == request_length

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['Host'])
                destination_host = p.get_headers()['Host']
                header_done = True

                Relay_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                Relay_socket.connect((destination_host,80))
                Relay_socket.sendall(HTTP_Request)
                print 'Forwarding data to destination host...'

                while True:
                    HTTP_Response = Relay_socket.recv(self.max)
                    if not HTTP_Response:
                        break
                    else:
                        print 'Received data back. Forwarding to the client...'
                        self.client.sendall(HTTP_Response)

            self.client.close()
            Relay_socket.close()
Example #22
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(("gunicorn.org", 80))
        s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print p.get_headers()
                print p.get_headers()["content-length"]
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print "".join(body)

    finally:
        s.close()
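
Note: the example above is Python 2 (print statements, text-mode sockets). As a rough sketch only, not taken from any project listed here, the same incremental pattern on Python 3 feeds bytes to the parser and joins the body chunks as bytes; fetch() is a hypothetical helper name:

import socket

try:
    from http_parser.parser import HttpParser
except ImportError:
    from http_parser.pyparser import HttpParser


def fetch(host, port=80):
    # Issue a simple GET and feed each received chunk to HttpParser incrementally.
    p = HttpParser()
    body = []
    with socket.create_connection((host, port)) as s:
        s.sendall(b"GET / HTTP/1.1\r\nHost: " + host.encode("ascii") +
                  b"\r\nConnection: close\r\n\r\n")
        while True:
            data = s.recv(1024)
            if not data:
                break
            # execute() returns the number of bytes parsed; it should match len(data).
            assert p.execute(data, len(data)) == len(data)
            if p.is_partial_body():
                body.append(p.recv_body())
            if p.is_message_complete():
                break
    return p.get_headers(), b"".join(body)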
Example #23
def make_request(sock, server_name):
    """
    Given an open socket, makes a simple HTTP request, parses the response, and
    returns a dictionary containing the HTTP headers that were returned by the
    server.
    """
    p = HttpParser()

    request = ('GET / HTTP/1.0\r\n' +
               'User-Agent: pySSLScan\r\n' +
               'Host: %s\r\n\r\n' % (server_name,))
    sock.write(request.encode('ascii'))

    headers = None
    while True:
        data = sock.recv(1024)
        if not data:
            break

        recved = len(data)
        nparsed = p.execute(data, recved)
        assert nparsed == recved

        if p.is_headers_complete():
            headers = p.get_headers()
            break

    return headers
Example #24
def proxy(data):
    recved = len(data)

    idx = data.find("\r\n")
    if idx <= 0:
        return

    line, rest = data[:idx], data[idx:]
    if line.startswith("CONNECT"):
        parts = line.split(None)
        netloc = parts[1]
        remote = parse_address(netloc, 80)

        reply_msg = "%s 200 OK\r\n\r\n" % parts[2]
        return {"remote": remote, "reply": reply_msg, "data": ""}

    parser = HttpParser()
    parsed = parser.execute(data, recved)
    if parsed != recved:
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n\r\nError parsing request'
        }

    if not parser.get_url():
        return

    parsed_url = urlparse.urlparse(parser.get_url())

    is_ssl = parsed_url.scheme == "https"
    remote = parse_address(parsed_url.netloc, 80)

    return {"remote": remote, "ssl": is_ssl}
Example #25
async def _recv_request(client: AsyncSocket,
                        prefix: bytes) -> Tuple[Optional[HttpParser], bytes]:
    p = HttpParser()
    data = prefix
    num_parsed = 0
    if data:
        num_parsed = p.execute(prefix, len(prefix))
    while not p.is_message_complete():
        data = await client.recv(4096)
        if not data:
            return None, b''
        num_parsed = p.execute(data, len(data))
        if not p.is_message_complete() and num_parsed < len(data):
            # Bad request and couldn't parse the content properly
            return p, b''
    return p, data[num_parsed:]
Example #26
def findhue():
    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:239.255.255.250:1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'

    # Set up UDP socket
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(5)
    s.sendto(msg.encode('utf-8'), ('239.255.255.250', 1900) )

    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr,headers

            if p.is_message_complete():
                break
    except timeout:
        pass
    return None
Example #27
def post_sync(sock, masterip, masterport):
    obj = {"last_fileno": haystack.haystack_last_fileno}
    body = json.dumps(obj)
    sock.send("POST /sync HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (masterip, masterport))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        # !!! ugly: read one byte at a time so we don't consume data beyond this response
        data = sock.recv(1)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
Example #28
    def run(self):

        HTTP_Request = self.client.recv(self.max)
        p = HttpParser()
        header_done = False
        destination_host = ''

        if HTTP_Request:
            print 'Got something from ' + str(self.address) + '...'
            request_length = len(HTTP_Request)
            nparsed = p.execute(HTTP_Request, request_length)
            assert nparsed == request_length

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['Host'])
                destination_host = p.get_headers()['Host']
                header_done = True

                Relay_socket = socket.socket(socket.AF_INET,
                                             socket.SOCK_STREAM)
                Relay_socket.connect((destination_host, 80))
                Relay_socket.sendall(HTTP_Request)
                print 'Forwarding data to destination host...'

                while True:
                    HTTP_Response = Relay_socket.recv(self.max)
                    if not HTTP_Response:
                        break
                    else:
                        print 'Received data back. Forwarding to the client...'
                        self.client.sendall(HTTP_Response)

            self.client.close()
            Relay_socket.close()
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('gunicorn.org', 80))
        s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                print(p.get_method())
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print(b("").join(body))

    finally:
        s.close()
Example #30
def handle_batch_client(sock):
    recvbuf = ""
    while True:
        rds, _, _ = select.select([sock], [], [], 60 * 5)
        if not rds:
            break

        data = sock.recv(1024)
        if not data:
            break
        recvbuf += data

        pos = recvbuf.find("\r\n\r\n")
        if pos == -1:
            continue
        parser = HttpParser()
        nparsed = parser.execute(recvbuf, pos + 4)
        if nparsed != pos + 4:
            logging.debug("pos:%d, nparsed:%d, recvbuf:%r", pos, nparsed, recvbuf)
        assert nparsed == pos + 4
        assert parser.is_headers_complete()
        headers = parser.get_headers()
        content_length = int(headers["Content-Length"]) if headers.has_key("Content-Length") else 0
        logging.debug("content length:%d", content_length)
        recvbuf = recvbuf[pos + 4 :]
        preread = recvbuf[:content_length]
        recvbuf = recvbuf[content_length:]
        keepalived = handle_request(sock, parser, preread)
        if not keepalived:
            break

    logging.debug("close client")
    sock.close()
Example #31
def proxy(data):
    recved = len(data)

    idx = data.find("\r\n")
    if idx <= 0:
        return

    line, rest = data[:idx], data[idx:]
    if line.startswith("CONNECT"):
        parts = line.split(None)
        netloc = parts[1]
        remote = parse_address(netloc, 80)

        reply_msg = "%s 200 OK\r\n\r\n" % parts[2]
        return {"remote": remote, 
                "reply": reply_msg,
                "data": ""}


    parser = HttpParser()
    parsed = parser.execute(data, recved)
    if parsed != recved:
        return  { 'close':'HTTP/1.0 502 Gateway Error\r\n\r\nError parsing request'}

    if not parser.get_url():
        return

    parsed_url = urlparse.urlparse(parser.get_url())

    is_ssl = parsed_url.scheme == "https"
    remote = parse_address(parsed_url.netloc, 80)

    return {"remote": remote, 
            "ssl": is_ssl}
Example #32
    def saveTCP(self, index, path):
        if os.name == 'nt':
            path = path.replace('file://', '')[1:]
        else:
            path = path.replace('file://', '')

        if (index + 1) in network_sniffer.getTcpBodies():
            f = open(path, 'wb')

            try:
                p = HttpParser()
                recved = len(network_sniffer.getTcpBodies()[index + 1]['data'])
                nparsed = p.execute(
                    network_sniffer.getTcpBodies()[index + 1]['data'], recved)
                assert nparsed == recved
                f.write(p.recv_body())

                ret = '解析到 HTTP 报文,已保存 HTTP 数据。'
            except AssertionError:
                f.write(network_sniffer.getTcpBodies()[index + 1]['data'])

                ret = '未解析到 HTTP 报文,已保存 TCP 数据。'

            f.close()

            return ret

        else:
            return '数据包不是 TCP 分段的最后一段。'
Example #33
    def _handle(self, source, dest, to_backend, on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub('\r\nHost: %s\r\n'
                                            % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket') or not
                self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #34
    def test_constructor(self):
        ''' Instance attributes autosubstitution.
        '''
        headers = {
            'Host': 'httpbin.org',
            'Connection': 'close',
        }
        hc = HttpCompiler(method='PATCH', headers=headers)
        qs = '/path/to/check'
        req = hc.build_raw(qs)

        p = HttpParser()
        p.execute(req, len(req))
        result_hdrs = p.get_headers()

        self.assertEqual(p.get_method(), 'PATCH')
        self.assertTrue(
            all([result_hdrs[h] == headers[h] for h in headers.keys()]))
Example #35
 def receive_buffer(self, buf):
     self.buffer += buf
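     # A fresh parser re-parses the whole accumulated buffer on every call.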
     parser = HttpParser()
     recved = len(self.buffer)
     nparsed = parser.execute(self.buffer, recved)
     assert nparsed == recved
     if parser.is_message_complete():
         return (True, parser)
     return (False, parser)
Example #36
    def test_constructor(self):
        ''' Instance attributes autosubstitution.
        '''
        headers = {
            'Host': 'httpbin.org',
            'Connection': 'close',
        }
        hc = HttpCompiler(method='PATCH', headers=headers)
        qs = '/path/to/check'
        req = hc.build_raw(qs)

        p = HttpParser()
        p.execute(req, len(req))
        result_hdrs = p.get_headers()

        self.assertEqual(p.get_method(), 'PATCH')
        self.assertTrue(all(
            [result_hdrs[h] == headers[h] for h in headers.keys()]))
Example #37
 def receive_buffer(self, buf):
     self.buffer += buf
     parser = HttpParser()
     recved = len(self.buffer)
     nparsed = parser.execute(self.buffer, recved)
     assert nparsed == recved
     if parser.is_message_complete():
         return (True, parser)
     return (False, parser)
Example #38
    def handler(self):
        httpParser1 = HttpParser()
        httpParser2 = HttpParser()
        serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        serverSocket.bind(('', 5000))
        serverSocket.listen(1)
        conn, addr = serverSocket.accept()
        cumulatedPacketLength = 0
        while 1:
            data = conn.recv(1024)
            receivedPacketLength = len(data)
            httpParser1.execute(data, receivedPacketLength)
            print httpParser1.get_method()
            cumulatedPacketLength += receivedPacketLength
            if cumulatedPacketLength > 235:
                self.isReceivedRequestMatchExpectation = True
                response_body_raw = '{"success":true,"data":"FILE_FOUND_AND_LOADED"}'
                conn.send('%s %s %s\r\n%s: %s\r\n%s: %s\r\n\r\n%s' % (  'HTTP/1.1', '200', 'OK',\
                                                                        'Content-Type','application/json; charset=utf-8',\
                                                                        'Content-Length',len(response_body_raw),\
                                                                        response_body_raw))
                conn.close()
            break

        serverSocket.listen(1)
        conn, addr = serverSocket.accept()
        cumulatedPacketLength = 0
        while 1:
            data = conn.recv(1024)
            receivedPacketLength = len(data)
            httpParser2.execute(data, receivedPacketLength)
            print httpParser2.get_method()
            cumulatedPacketLength += receivedPacketLength
            if cumulatedPacketLength > 235:
                self.isReceivedRequestMatchExpectation = True
                response_body_raw = '{"success":true,"data":"FILE_FOUND_AND_LOADED"}'
                conn.send('%s %s %s\r\n%s: %s\r\n%s: %s\r\n\r\n%s' % (  'HTTP/1.1', '200', 'OK',\
                                                                        'Content-Type','application/json; charset=utf-8',\
                                                                        'Content-Length',len(response_body_raw),\
                                                                        response_body_raw))
                conn.close()
                serverSocket.close()
            break
Example #39
def handle(connection, address, pid, queue_obj):
  import logging
  import json
  from queue import Full

  logging.basicConfig(level=logging.DEBUG)
  logger = logging.getLogger("process-%r" % (address,))
  content = []
  parser = HttpParser()

  try:
    logger.debug("Connected %r at %r", connection, address)
    while True:
      resp = connection.recv(psize)
      recved = len(resp)

      parsed = parser.execute(resp, recved)
      assert parsed == recved

      if parser.is_headers_complete():
        parser.get_headers()

      if parser.is_partial_body():
        content.append(parser.recv_body())

      if parser.is_message_complete():
        break
  except:
    logger.exception("Problem handling request: %s", sys.exc_info()[1])
    send_and_close(connection, 500)
    return

  parsed_json = {}
  data = None

  try:
    parsed_json = json.loads("".join(map(lambda s: s.decode("utf-8"), content)))
    data = parsed_json.get('data')
    url = parsed_json.get('callback')
    key = parsed_json.get('private_key')
  except:
    logger.exception("Problem decoding JSON: %s", sys.exc_info()[1])
  finally:
    if data is None:
      send_and_close(connection, 400, {"message": "JSON Parse Error"})
    elif data == 'ping':
      send_and_close(connection, 200, {"started": started, "queue": queue_obj.qsize()})
    elif data == 'stop':
      send_and_close(connection, 200, {"message": "Shutting down"})
      os.kill(pid, signal.SIGUSR1)
    elif 'trackers' in data and 'hashes' in data:
      try:
        queue_obj.put({"data": [data, url, key], "address": address}, False)
        send_and_close(connection, 200, {"message": ("in queue [%r]" % (address,))})
      except Full:
        send_and_close(connection, 429, {"message": "Server queue is full. Try another one."})
Example #40
File: http.py  Project: HVF/diesel
    def request(self, method, url, headers={}, body=None, timeout=None):
        '''Issues a `method` request to `url` on the
        connected server.  Sends along `headers`, and
        body.

        Very low level--you must set "host" yourself,
        for example.  It will set Content-Length,
        however.
        '''
        url_info = urlparse(url)
        fake_wsgi = dict(
        (cgi_name(n), v) for n, v in headers.iteritems())
        fake_wsgi.update({
            'HTTP_METHOD' : method,
            'SCRIPT_NAME' : '',
            'PATH_INFO' : url_info[2],
            'QUERY_STRING' : url_info[4],
            'wsgi.version' : (1,0),
            'wsgi.url_scheme' : 'http', # XXX incomplete
            'wsgi.input' : cStringIO.StringIO(body or ''),
            'wsgi.errors' : FileLikeErrorLogger(hlog),
            'wsgi.multithread' : False,
            'wsgi.multiprocess' : False,
            'wsgi.run_once' : False,
            })
        req = Request(fake_wsgi)

        timeout_handler = TimeoutHandler(timeout or 60)

        send('%s %s HTTP/1.1\r\n%s' % (req.method, req.url, str(req.headers)))

        if body:
            send(body)

        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
            if ev == 'sleep': timeout_handler.timeout()
            data = val

        resp = Response(
            response=''.join(body),
            status=h.get_status_code(),
            headers=h.get_headers(),
            )

        return resp
Example #41
def proxy(data):
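    # HttpParser(0) parses requests only; the first positional argument is the parser kind.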
    parser = HttpParser(0)
    parser.execute(data, len(data))
    path = parser.get_path()
    if path.startswith('/static'):
        path = os.path.join(ROOT, path[1:])
        if os.path.exists(path):
            fno = os.open(path, os.O_RDONLY)
            return {
                "file": fno,
                "reply": "HTTP/1.1 200 OK\r\n\r\n"
            }
        else:
            return {
                "close": True
            }
    return {
        "close": True
    }
Example #42
def findhue():  #Auto-find bridges on network & get list
    r = requests.get("https://discovery.meethue.com/")
    bridgelist = json.loads(r.text)
    i = 0
    for b in bridgelist:
        i += 1

    if commandlineargs.bridgeid is not None:
        found = False
        for idx, b in enumerate(bridgelist):
            if b["id"] == commandlineargs.bridgeid:
                bridge = idx
                found = True
                break
        if not found:
            sys.exit("bridge {} was not found".format(
                commandlineargs.bridgeid))
    elif len(bridgelist) > 1:
        print("Multiple bridges found. Select one of the bridges below (",
              list(bridgelist), ")")
        bridge = int(input())
    else:
        bridge = 0  #Default to the only bridge if only one is found

    hueip = bridgelist[bridge][
        'internalipaddress']  #Logic currently assumes 1 bridge on the network
    print("I will use the bridge at ", hueip)

    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:' + hueip +':1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(12)
    s.sendto(msg.encode('utf-8'), (hueip, 1900))
    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr, headers
            if p.is_message_complete():
                break
    except timeout:
        verbose('Timed out, better luck next time')
        pass
    return None
class Session:
  def __init__(self, current_key_hex, partner_key_hex):
    # self.body = []
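    # kind=2 accepts both requests and responses; decompress=True transparently decompresses gzip/deflate bodies.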
    self.parser = HttpParser(kind=2, decompress=True)
    self.data_bytes = 0
    self.total_bytes = 0
    self.current_key_hex = current_key_hex
    self.partner_key_hex = partner_key_hex
    self.is_request = None
    self.service = None

  def getPartner(self):
    return sessions[self.partner_key_hex]

  def getService(self):
    if (self.is_request == False):
      return self.getPartner().getService()
    if (self.is_request is None):
      return '_unknown'
    if (self.service is None):
      self.service = getServiceForQS(self.parser.get_query_string())
    return self.service

  def eat(self, payload_string, bytes_sent):
    received_len = len(payload_string)
    self.data_bytes += received_len
    self.total_bytes += bytes_sent
    parsed_len = self.parser.execute(payload_string, received_len)
    # assert received_len == parsed_len

    # if self.parser.is_headers_complete():
    #   eprint(self.parser.get_headers())

    # if self.parser.is_partial_body():
    #   self.body.append(self.parser.recv_body())

    # if self.parser.is_message_complete():
    #   eprint("".join(self.body))

    if self.parser.get_status_code() != 0:
      self.is_request = False
      addBytesOutboundFromService(bytes_sent, self.getService())
      # eprint(payload_string)
    elif self.parser.is_message_begin():
      self.is_request = True
      addBytesInboundToService(bytes_sent, self.getService())
    else:
      addBytesUnknownboundToService(bytes_sent, self.getService())

    # if (self.parser.is_headers_complete() and not self.parser.is_message_complete()):
    #   eprint("expected: %s, so far: %d" % (self.parser.get_headers().get('CONTENT-LENGTH'), self.data_bytes))

    if self.parser.is_message_complete():
      eprint("end!")
Example #44
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as we
    receive more incoming data, until we return something other than None.

    typically our response tells tproxy where to proxy the connection to, but
    may also tell it to hang up, or respond with some error message.
    """

    log = logging.getLogger("proxy")

    bytes_received = len(data)

    parser = HttpParser()
    bytes_parsed = parser.execute(data, bytes_received)

    if bytes_parsed != bytes_received:
        return {'close': 'HTTP/1.0 400 Bad Request\r\n\r\nParse error'}

    if not parser.is_headers_complete():
        if bytes_received > MAX_HEADER_LENGTH:
            return {
                'close': 'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large'
            }
        return None

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % (parser.get_url(), ))
        return _ROUTER.route(route_host, parser.get_method(),
                             parser.get_path(), parser.get_query_string())
    except Exception, err:
        log.error("error routing %r, %s" % (
            parser.get_url(),
            traceback.format_exc(),
        ))
        gevent.sleep(ERROR_DELAY)
        return {
            'close': 'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request'
        }
Example #45
def proxy(data):
    """
    the function called by tproxy to determine where to send traffic

    tproxy will call this function repeatedly for the same connection, as we
    receive more incoming data, until we return something other than None.

    typically our response tells tproxy where to proxy the connection to, but
    may also tell it to hang up, or respond with some error message.
    """

    log = logging.getLogger("proxy")

    bytes_received = len(data)

    parser =  HttpParser()
    bytes_parsed = parser.execute(data, bytes_received)

    if bytes_parsed != bytes_received:
        return { 'close': 
            'HTTP/1.0 400 Bad Request\r\n\r\nParse error' }

    if not parser.is_headers_complete(): 
        if bytes_received > MAX_HEADER_LENGTH:
            return { 'close': 
                'HTTP/1.0 400 Bad Request\r\n'
                '\r\nHeaders are too large' }
        return None

    headers = parser.get_headers()

    # the hostname may be in the form of hostname:port, in which case we want
    # to discard the port, and route just on hostname
    route_host = headers.get('HOST', None)
    if route_host:
        match = _HOST_PORT_REGEXP.match(route_host)
        if match:
            route_host = match.group(1)

    try:
        log.debug("Routing %r" % ( parser.get_url(), ))
        return _ROUTER.route(
            route_host,
            parser.get_method(),
            parser.get_path(),
            parser.get_query_string())
    except Exception, err:
        log.error("error routing %r, %s" % (
            parser.get_url(), traceback.format_exc(), ))
        gevent.sleep(ERROR_DELAY)
        return { 'close': 
            'HTTP/1.0 502 Gateway Error\r\n'
            '\r\nError routing request' }
Example #46
	def parse_request(self, message):
		try:
		    from http_parser.parser import HttpParser
		except ImportError:
		    from http_parser.pyparser import HttpParser

		p = HttpParser()
		nparsed = p.execute(message,len(message))
		
		self.method = p.get_method()
		self.path = p.get_path()
		self.headers = p.get_headers()

		if p.get_method() == 'GET':
			self.status = 200

		#if "Range" in p.get_headers():
		#	strings = self.headers["Range"]
		#	print strings

		elif p.get_method() != 'GET':
			self.status = 501		#if the method is not a GET
			#TODO maybe make this a head request eventually if you do the download accelerator

		if not p.get_method():
			self.status = 400

		if p.get_path() == '/':
			self.path = '/index.html'

		elif p.get_path().endswith('/'):
			self.path += 'index.html'
		
		if p.get_path() is None:
			self.status = 501


		#print self.path
		"""
		print '\nMethod: ' 
		print p.get_method() 
		print '\nPath: ' 
		print p.get_path()
		print '\nHeaders: ' 
		print p.get_headers()
		print '\nVersion: '
		version = p.get_version()
		print version
		"""
		#print '\nRESPONSE CODE: ' + str(self.status) + '\n'
		#print self.path
		#print self.status
		#working so far
 def handshake(self):
     message = self.request.recv(1024).decode().strip()
     parser = HttpParser()
     parser.execute(message, len(message))
     upgrade = re.search('\nupgrade[\s]*:[\s]*websocket', message.lower())
     if not upgrade:
         self.keep_alive = False
         return
     key = re.search('\n[sS]ec-[wW]eb[sS]ocket-[kK]ey[\s]*:[\s]*(.*)\r\n',
                     message)
     if key:
         key = key.group(1)
     else:
         logger.warning("Client tried to connect but was missing a key")
         self.keep_alive = False
         return
     response = self.make_handshake_response(key)
     self.handshake_done = self.request.send(response.encode())
     self.valid_client = True
     self.path = parser.get_url()
     self.server._new_client_(self)
Example #48
class HttpRequest(object):
    __cgi_config = None

    def __init__(self, request_text, server_config):
        self.__parser = HttpParser()
        self.__parser.execute(request_text, len(request_text))
        self.__server_config = server_config

    def get_body(self):
        if self.__parser.is_partial_body():
            return self.__parser.recv_body()
        return None

    def get_headers(self):
        return self.__parser.get_headers()

    def get_request_method(self):
        return self.__parser.get_method()

    def get_request_path(self):
        return self.__parser.get_path()

    def get_cgi_config(self):
        if self.__cgi_config is None:
            __cgi_config = {}
            #WSGI required variable
            #__cgi_config['wsgi.input'] = io.StringIO(self.get_body())

            #CGI
            __cgi_config['SERVER_NAME'] = self.__server_config['server_name']
            __cgi_config['SERVER_PORT'] = self.__server_config['server_port']
            __cgi_config['SERVER_PROTOCOL'] = 'HTTP/1.1'
            __cgi_config['REQUEST_METHOD'] = self.get_request_method()
            __cgi_config['PATH_INFO'] = self.get_request_path()

            for header, value in self.get_headers().items():
                __cgi_config[f'HTTP_{header}'] = value

            self.__cgi_config = __cgi_config
        return self.__cgi_config
Example #49
    def parse_html(self):
        try:
            resolve_ip = ''
            data = []
            filter_list = ['*', '> ', '< ', '{']
            for item in self.result.split("\n"):
                if 'Trying' in item:
                    resolve_ip = item.replace('*', "").replace(
                        "Trying", "").replace("...", "").strip()
                    log.logger.info('resolve_ip: %s ' % (resolve_ip))

                matching = [s for s in filter_list if s in item[:2]]
                if len(matching) == 0:
                    data.append(item.encode('utf-8'))
            parsing_string = b("\r\n").join(data)
            p = HttpParser()
            p.execute(parsing_string, len(parsing_string))
            status_code = str(p.get_status_code())
            header_obj = p.get_headers()
            #body = str(p.recv_body())

            header_list = []
            if resolve_ip:
                header_list.append('%s:%s' %
                                   ("resolve ip", resolve_ip.strip()))
            for key, value in header_obj.items():
                header_list.append('%s:%s' % (key, value))
            header = ("<br/>").join(header_list)

            body = self.content["result"]

            log.logger.info('resolve_ip :%s ' % (resolve_ip))
            log.logger.info('status_code :%s ' % (status_code))
            log.logger.info('header :%s ' % (header))
            log.logger.info('body :%s ' % (body))

            return status_code, header, body
        except Exception as e:
            log.logger.info('Exception: %s ' % (str(e)))
            return None, None, str(e)
Example #50
File: http.py  Project: levigross/vaurien
    def _handle(self, source, dest, to_backend):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        while not parser.is_message_complete():
            data = self._get_data(source, buffer_size)
            if not data:
                self._abort_handling(to_backend, dest)
                return False
            nparsed = parser.execute(data, len(data))
            assert nparsed == len(data)
            data = HOST_REPLACE.sub('\r\nHost: %s\r\n'
                                    % self.proxy.backend, data)
            dest.sendall(data)

        # Getting the HTTP response and sending it back to the source.
        parser = HttpParser()
        while not parser.is_message_complete():
            data = self._get_data(dest, buffer_size)
            if not data:
                self._abort_handling(to_backend, dest)
                return False
            nparsed = parser.execute(data, len(data))
            assert nparsed == len(data)
            source.sendall(data)

        keep_alive = parser.should_keep_alive()

        # do we close the client ?
        if not keep_alive and not self.option('keep_alive'):
            source.close()
            source._closed = True

        if not self.option('reuse_socket') and not self.option('keep_alive'):
            dest.close()
            dest._closed = True

        # we're done
        return keep_alive or self.option('keep_alive')
Example #51
File: proxy.py  Project: benoitc/hroute
    def proxy(self, data):
        # parse headers
        recved = len(data)
        parser = HttpParser()
        nparsed = parser.execute(data, recved)
        if nparsed != recved:
            return {"close": True}

        if not parser.is_headers_complete():
            return

        # get remote
        return self.lookup(parser)
Example #52
File: http.py  Project: HVF/diesel
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()

                env.update({
                    'wsgi.version' : (1,0),
                    'wsgi.url_scheme' : 'http', # XXX incomplete
                    'wsgi.input' : cStringIO.StringIO(''.join(body)),
                    'wsgi.errors' : FileLikeErrorLogger(hlog),
                    'wsgi.multithread' : False,
                    'wsgi.multiprocess' : False,
                    'wsgi.run_once' : False,
                    })
                req = Request(env)

                resp = self.request_handler(req)
                assert resp, "HTTP request handler _must_ return a response"

                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add('Date', utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') is None:
                    return

            except ConnectionClosed:
                break
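The diesel server above leans on HttpParser.get_wsgi_environ() and then patches in the missing WSGI keys itself. A minimal sketch of that call in isolation, assuming an invented request (the printed keys are standard WSGI names and may be absent, hence the use of .get()):

    from http_parser.parser import HttpParser

    raw_request = (b"GET /ping?x=1 HTTP/1.1\r\n"
                   b"Host: example.com\r\n"
                   b"\r\n")

    p = HttpParser()
    p.execute(raw_request, len(raw_request))
    assert p.is_headers_complete()

    environ = p.get_wsgi_environ()
    # Anything the parser did not fill still has to be added by hand,
    # exactly as the example above does for wsgi.input, wsgi.errors, etc.
    print(environ.get("REQUEST_METHOD"), environ.get("PATH_INFO"), environ.get("QUERY_STRING"))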
Example #53
    def handle(self):
        p = HttpParser()
        while True:
            data = self.socket.recv(ProxyHandler.BUFF_LEN)
            self.buff += data
            size = len(data)
            p.execute(data, size)
            if p.is_message_complete():
                break
        remote_url = p.get_headers().get('Host')
        remote_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        remote_sock.connect((remote_url, 80))
        p2 = HttpParser()
        bf = ""
        remote_sock.send(self.buff)
        while True:
            data = remote_sock.recv(ProxyHandler.BUFF_LEN)
            bf += data
            s = len(data)
            p2.execute(data, s)
            if p2.is_message_complete():
                self.socket.send(bf)
                break
Example #54
def heartbeat(sock):
    ip, port = sock.getpeername()
    parser = HttpParser()
    sock.send("GET /ping HTTP/1.1\r\nHost: %s:%d\r\n\r\n" % (ip, port))

    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
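The heartbeat above keeps calling parser.execute() on each received chunk until is_message_complete() turns true. The same incremental pattern, detached from sockets, with two made-up chunks that split the response mid-header:

    from http_parser.parser import HttpParser

    chunks = [
        b"HTTP/1.1 200 OK\r\nContent-Le",   # first recv(): headers cut in half
        b"ngth: 4\r\n\r\npong",              # second recv(): rest of headers + body
    ]

    p = HttpParser()
    for chunk in chunks:
        nparsed = p.execute(chunk, len(chunk))
        assert nparsed == len(chunk)
        if p.is_message_complete():
            break

    print(p.get_status_code() == 200)  # True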
Example #55
    def handleData(self,fd):
        self.debug("Entering handleData")
        if '\r\n\r\n' not in self.con_cache[fd]:
            self.debug("Partial message - Exiting handleData")
            return 
        p = HttpParser() 
        nparsed = p.execute(self.con_cache[fd],len(self.con_cache[fd]))                 
        
        method = p.get_method()
        path = p.get_path()
        headers = p.get_headers() 
        debugStr = "\nMethod: %s\nPath: %s\nHeaders: %s\n" % (method,path,headers)  
        #self.debug(debugStr)
       
        rangeRequest = None 
        if 'Range' in headers:
            rangeRequest = headers['Range']
            self.debug("Range: %s" % (rangeRequest))
           

        validMethods = ['GET','HEAD','PUT','DELETE','POST']
        isValid = False 
        
        if method not in validMethods:
            response = self.makeError('400','Bad Request')
        elif method != 'GET' and method != 'HEAD':
            response = self.makeError('501','Not Implemented')
        else:
            if path == '/':
                path = '/index.html'
                
            path = self.hosts['default'] + path 
            (isValid,response) = self.makeResponse(path,rangeRequest) 

        self.clients[fd].send(response)
         
        self.debug("PATH:%s"%(path))
       
        if isValid and not rangeRequest and method != "HEAD":
            self.sendAll(path,fd) 
        elif isValid and rangeRequest and method != "HEAD":
            (start,end) = self.getByteRange(rangeRequest) 
            self.send(path,fd,start,end) 

        self.debug("Exiting handleData") 
Example #56
File: upstream.py  Project: dtrip/proxpy
    def makeRequest(self, host, url="/", port=80, method='GET', headers=None, postdata=None):
        assert self.e is not None
        evSet = self.e.wait()  # noqa: F841
        # log.debug("Generating raw http request")
        self.s.connect((host, port))

        if headers is None:
            headers = {
                    "Accept": "*/*",
                    "User-Agent": self.useragent
            }

        req = self.rawHttpReq(host, url, method, headers, postdata)

        self.s.sendall(req.encode())

        h = []
        body = []
        p = HttpParser()
        tlen = 0

        while True:
            data = self.s.recv(2048)

            if not data:
                break

            rlen = len(data)
            tlen += rlen
            nparsed = p.execute(data, rlen)
            assert nparsed == rlen

            if p.is_headers_complete():
                h = p.get_headers()
                # log.debug(p.get_headers())
            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        self.s.close()

        res = {'status': p.get_status_code(), 'length': tlen, 'headers': h, 'body': body, 'request': req}
        print(res)
Example #57
    def receive(self):
        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            data = self.s.recv(BUFSIZE)

        return Response(response=''.join(body),
                        status=h.get_status_code(),
                        headers=h.get_headers(),
                        )
Example #58
	def get_appropriate_response(self):

		try:
			# try to use the fast C parser
			from http_parser.parser import HttpParser
		except ImportError:
			# fall back to the Python parser
			from http_parser.pyparser import HttpParser

		p = HttpParser()
		# the length passed to execute() must be that of the encoded bytes,
		# not of the original unicode string
		raw_request = self.content.encode('utf-8')
		nparsed = p.execute(raw_request, len(raw_request))

		if not p.is_headers_complete():
			return HttpResponseBadRequest(content_f=BAD_REQUEST_HTML)

		# check method
		if p.get_method() not in SUPPORTED_METHODS:
			return HttpResponseNotImplemented(content_f=NOT_IMPLEMENTED_HTML)

		base_filepath = ''
		try:
			base_filepath = settings.HOSTS[p.get_headers()['Host'].split(':')[0]]
		except KeyError:
			base_filepath = settings.HOSTS['default']

		req_file = self.content.split(' ')[1]
		if req_file == '/':
			req_file = '/index.html'

		try:
			full_path = base_filepath + req_file
			open(full_path)
			if p.get_method() == 'HEAD':
				return HttpResponse(content_f=full_path, method='HEAD')
			if 'Range' in p.get_headers():
				return HttpResponsePartialContent(content_f=full_path, h_range=p.get_headers()['Range'])	
			return HttpResponse(content_f=full_path)
		except IOError as err:
			if err.errno == 13:
				return HttpResponseForbidden(content_f=FORBIDDEN_HTML)
			elif err.errno == 2:
				return HttpResponseNotFound(content_f=NOT_FOUND_HTML)

		return HttpResponseServerError(content_f=SERVER_ERROR_HTML)
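The try/except import at the top of this last example is the usual way to prefer the C extension and fall back to the pure-Python parser; the rest of the method then only needs the request line and headers. A small, self-contained sketch of that combination (the request bytes and host name are invented for illustration):

    try:
        # fast C parser if the extension is available
        from http_parser.parser import HttpParser
    except ImportError:
        # pure-Python fallback with the same API
        from http_parser.pyparser import HttpParser

    raw_request = b"HEAD /index.html HTTP/1.1\r\nHost: files.local\r\n\r\n"

    p = HttpParser()
    p.execute(raw_request, len(raw_request))

    print(p.get_method())   # "HEAD"
    print(p.get_path())     # "/index.html"
    print(p.get_headers())  # includes the Host header used for virtual hosting above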