Example #1
    def _handle(self,
                source,
                dest,
                to_backend,
                on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub(
                        '\r\nHost: %s\r\n' % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket')
                    or not self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #2
async def _recv_request(client: AsyncSocket,
                        prefix: bytes) -> Tuple[Optional[HttpParser], bytes]:
    p = HttpParser()
    data = prefix
    num_parsed = 0
    if data:
        num_parsed = p.execute(prefix, len(prefix))
    while not p.is_message_complete():
        data = await client.recv(4096)
        if not data:
            return None, b''
        num_parsed = p.execute(data, len(data))
        if not p.is_message_complete() and num_parsed < len(data):
            # Bad request and couldn't parse the content properly
            return p, b''
    return p, data[num_parsed:]
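The leftover bytes returned above are what makes pipelining work: whatever `execute()` did not consume belongs to the next request. A minimal driver sketch for `_recv_request` (the `handle_request` coroutine is a hypothetical name, not part of the example):

async def serve(client: AsyncSocket) -> None:
    # Carry bytes parsed past the end of one message into the next request.
    leftover = b''
    while True:
        parser, leftover = await _recv_request(client, leftover)
        if parser is None:
            return  # peer closed the connection
        if not parser.is_message_complete():
            return  # malformed request; a real server would answer 400 here
        await handle_request(client, parser)  # hypothetical application callback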
Example #3
    def start(self):
        signal.signal(signal.SIGTERM, SIG_DFL)
        print(f"Worker booted with pid: {os.getpid()}")
        while True:
            body = []
            conn, addr = self.socket.accept()
            http_parser = HttpParser()
            with conn:
                while True:
                    data = conn.recv(1024)
                    if not data:
                        break
                    recved = len(data)
                    nparsed = http_parser.execute(data, recved)
                    assert nparsed == recved

                    if http_parser.is_headers_complete():
                        print(http_parser.get_headers())

                    if http_parser.is_partial_body():
                        body.append(http_parser.recv_body())

                    if http_parser.is_message_complete():
                        break

                buffered_body = io.StringIO("".join(body))
                koi(self.app,
                    conn,
                    request_method=http_parser.get_method(),
                    headers=http_parser.get_headers(),
                    body=buffered_body,
                    content_length=http_parser.get_headers().get(
                        'content-length', 0))
Example #4
def findhue():
    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:239.255.255.250:1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'

    # Set up UDP socket
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(5)
    s.sendto(msg.encode('utf-8'), ('239.255.255.250', 1900) )

    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr,headers

            if p.is_message_complete():
                break
    except timeout:
        pass
    return None
Example #5
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(("gunicorn.org", 80))
        s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print p.get_headers()
                print p.get_headers()["content-length"]
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print "".join(body)

    finally:
        s.close()
Example #6
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if 'HTTP_CONTENT_LENGTH' in env:
                    env['CONTENT_LENGTH'] = env.pop("HTTP_CONTENT_LENGTH")
                if 'HTTP_CONTENT_TYPE' in env:
                    env['CONTENT_TYPE'] = env.pop("HTTP_CONTENT_TYPE")

                env.update({
                    'wsgi.version': (1, 0),
                    'wsgi.url_scheme': 'http',  # XXX incomplete
                    'wsgi.input': cStringIO.StringIO(''.join(body)),
                    'wsgi.errors': FileLikeErrorLogger(hlog),
                    'wsgi.multithread': False,
                    'wsgi.multiprocess': False,
                    'wsgi.run_once': False,
                    'REMOTE_ADDR': addr[0],
                    'SERVER_NAME': HOSTNAME,
                    'SERVER_PORT': str(self.port),
                })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add(
                        'Date',
                        utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') == None:
                    return

            except ConnectionClosed:
                break
Example #7
    def recv_http_response(self, conn):
        response = HttpParser(kind=1)
        status_code = None
        headers = None

        try:
            while True:
                chunk = conn.recv(1024)

                response.execute(chunk, len(chunk))
                if response.is_headers_complete():
                    headers = response.get_headers()
                    status_code = response.get_status_code()

                    content_length = headers.get('content-length')
                    if not content_length or int(content_length) == 0:
                        break

                if response.is_message_complete():
                    break

                if not chunk:
                    raise EOFError('Incomplete Message')

        except Exception as e:
            raise GeneralProxyError(
                'HTTP Proxy communication error ({})'.format(e))

        return status_code, headers
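A caller sketch for the proxy handshake this method supports, reusing the example's GeneralProxyError and assuming it lives on the same class (the CONNECT request line below is illustrative only):

    def connect_via_proxy(self, conn, host, port):
        # Ask the HTTP proxy to open a tunnel, then validate its reply.
        conn.sendall(('CONNECT %s:%d HTTP/1.1\r\n\r\n' % (host, port)).encode())
        status_code, headers = self.recv_http_response(conn)
        if status_code != 200:
            raise GeneralProxyError('Tunnel failed with status {}'.format(status_code))
        return headers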
Example #8
def post_report(sock):
    st = os.statvfs(haystack_path)
    available_size = st.f_bavail * st.f_frsize
    obj = {}
    obj["listenip"] = listenip
    obj["listenport"] = listenport
    obj["disk_available_size"] = available_size
    obj["master"] = master
    obj["groupid"] = groupid
    obj["last_fileno"] = haystack.haystack_last_fileno
    body = json.dumps(obj)
    sock.send("POST /report HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (track.ip, track.port))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
Example #9
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('gunicorn.org', 80))
        s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                print(p.get_method())
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        print(b("").join(body))

    finally:
        s.close()
Example #10
def post_sync(sock, masterip, masterport):
    obj = {"last_fileno": haystack.haystack_last_fileno}
    body = json.dumps(obj)
    sock.send("POST /sync HTTP/1.1\r\n")
    sock.send("Host: %s:%d\r\n" % (masterip, masterport))
    sock.send("Content-Length: %d\r\n" % len(body))
    sock.send("Content-Type: application/json\r\n")
    sock.send("Connection: keep-alive\r\n")
    sock.send("\r\n")
    sock.send(body)

    parser = HttpParser()
    while True:
        # !!! ugly: read one byte at a time so we don't consume bytes that belong to the next HTTP message
        data = sock.recv(1)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
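The one-byte reads above exist only so the loop never swallows bytes that belong to the next message on the keep-alive socket. A hedged alternative, sketched here as a hypothetical helper, reads normal-sized chunks and hands any unconsumed tail back to the caller, the same way Example #2 does:

def recv_one_response(sock, leftover=b""):
    # Parse exactly one HTTP response; return (parser, bytes belonging to the next message).
    parser = HttpParser()
    data = leftover
    while True:
        if data:
            nparsed = parser.execute(data, len(data))
            if parser.is_message_complete():
                return parser, data[nparsed:]
        data = sock.recv(1024)
        if not data:
            return parser, b""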
Example #11
    def _handle(self, source, dest, to_backend, on_between_handle,
                data_sent=False):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        if not data_sent:
            while not parser.is_message_complete():
                data = self._get_data(source, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                if self.option('overwrite_host_header'):
                    data = HOST_REPLACE.sub('\r\nHost: %s\r\n'
                                            % self.proxy.backend, data)
                dest.sendall(data)
        keep_alive_src = parser.should_keep_alive()
        method = parser.get_method()

        if on_between_handle():
            # Getting the HTTP response and sending it back to the source.
            parser = HttpParser()
            while not (parser.is_message_complete() or
                       (method == 'HEAD' and parser.is_headers_complete())):
                data = self._get_data(dest, buffer_size)
                if not data:
                    return self._close_both(source, dest)
                nparsed = parser.execute(data, len(data))
                assert nparsed == len(data)
                source.sendall(data)
            keep_alive_dst = parser.should_keep_alive()

            # do we close the client ?
            if not keep_alive_src or not self.option('keep_alive'):
                source.close()
                source._closed = True

            if (not keep_alive_dst or not self.option('reuse_socket') or not
                self.option('keep_alive')):
                dest.close()
                dest._closed = True
        else:
            keep_alive_dst = False

        return keep_alive_dst and self.option('keep_alive')
Example #12
 def receive_buffer(self, buf):
     self.buffer += buf
     parser = HttpParser()
     recved = len(self.buffer)
     nparsed = parser.execute(self.buffer, recved)
     assert nparsed == recved
     if parser.is_message_complete():
         return (True, parser)
     return (False, parser)
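The receive_buffer example above re-parses the entire accumulated buffer with a fresh HttpParser on every call, which grows quadratically on large messages. A sketch of an incremental version (class and attribute names are illustrative) keeps one parser per connection and feeds it only the new bytes:

class BufferedConnection:
    def __init__(self):
        self.parser = HttpParser()

    def receive_buffer(self, buf):
        # Feed only the newly received bytes to the persistent parser.
        nparsed = self.parser.execute(buf, len(buf))
        assert nparsed == len(buf)
        return (self.parser.is_message_complete(), self.parser)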
Example #14
File: http.py Project: HVF/diesel
    def request(self, method, url, headers={}, body=None, timeout=None):
        '''Issues a `method` request to `url` on the
        connected server.  Sends along `headers`, and
        body.

        Very low level--you must set "host" yourself,
        for example.  It will set Content-Length,
        however.
        '''
        url_info = urlparse(url)
        fake_wsgi = dict(
            (cgi_name(n), v) for n, v in headers.iteritems())
        fake_wsgi.update({
            'HTTP_METHOD' : method,
            'SCRIPT_NAME' : '',
            'PATH_INFO' : url_info[2],
            'QUERY_STRING' : url_info[4],
            'wsgi.version' : (1,0),
            'wsgi.url_scheme' : 'http', # XXX incomplete
            'wsgi.input' : cStringIO.StringIO(body or ''),
            'wsgi.errors' : FileLikeErrorLogger(hlog),
            'wsgi.multithread' : False,
            'wsgi.multiprocess' : False,
            'wsgi.run_once' : False,
            })
        req = Request(fake_wsgi)

        timeout_handler = TimeoutHandler(timeout or 60)

        send('%s %s HTTP/1.1\r\n%s' % (req.method, req.url, str(req.headers)))

        if body:
            send(body)

        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
            if ev == 'sleep': timeout_handler.timeout()
            data = val

        resp = Response(
            response=''.join(body),
            status=h.get_status_code(),
            headers=h.get_headers(),
            )

        return resp
Example #15
def handle(connection, address, pid, queue_obj):
  import logging
  import json
  from queue import Full

  logging.basicConfig(level=logging.DEBUG)
  logger = logging.getLogger("process-%r" % (address,))
  content = []
  parser = HttpParser()

  try:
    logger.debug("Connected %r at %r", connection, address)
    while True:
      resp = connection.recv(psize)
      recved = len(resp)

      parsed = parser.execute(resp, recved)
      assert parsed == recved

      if parser.is_headers_complete():
        parser.get_headers()

      if parser.is_partial_body():
        content.append(parser.recv_body())

      if parser.is_message_complete():
        break
  except:
    logger.exception("Problem handling request: %s", sys.exc_info()[1])
    send_and_close(connection, 500)
    return

  parsed_json = {}
  data = None

  try:
    parsed_json = json.loads("".join(map(lambda s: s.decode("utf-8"), content)))
    data = parsed_json.get('data')
    url = parsed_json.get('callback')
    key = parsed_json.get('private_key')
  except:
    logger.exception("Problem decoding JSON: %s", sys.exc_info()[1])
  finally:
    if data is None:
      send_and_close(connection, 400, {"message": "JSON Parse Error"})
    elif data == 'ping':
      send_and_close(connection, 200, {"started": started, "queue": queue_obj.qsize()})
    elif data == 'stop':
      send_and_close(connection, 200, {"message": "Shutting down"})
      os.kill(pid, signal.SIGUSR1)
    elif 'trackers' in data and 'hashes' in data:
      try:
        queue_obj.put({"data": [data, url, key], "address": address}, False)
        send_and_close(connection, 200, {"message": ("in queue [%r]" % (address,))})
      except Full:
        send_and_close(connection, 429, {"message": "Server queue is full. Try another one."})
Example #16
def findhue():  #Auto-find bridges on network & get list
    r = requests.get("https://discovery.meethue.com/")
    bridgelist = json.loads(r.text)
    i = 0
    for b in bridgelist:
        i += 1

    if commandlineargs.bridgeid is not None:
        found = False
        for idx, b in enumerate(bridgelist):
            if b["id"] == commandlineargs.bridgeid:
                bridge = idx
                found = True
                break
        if not found:
            sys.exit("bridge {} was not found".format(
                commandlineargs.bridgeid))
    elif len(bridgelist) > 1:
        print("Multiple bridges found. Select one of the bridges below (",
              list(bridgelist), ")")
        bridge = int(input())
    else:
        bridge = 0  #Default to the only bridge if only one is found

    hueip = bridgelist[bridge][
        'internalipaddress']  #Logic currently assumes 1 bridge on the network
    print("I will use the bridge at ", hueip)

    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:' + hueip +':1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(12)
    s.sendto(msg.encode('utf-8'), (hueip, 1900))
    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr, headers
            if p.is_message_complete():
                break
    except timeout:
        verbose('Timed out, better luck next time')
        pass
    return None
Example #17
class Session:
  def __init__(self, current_key_hex, partner_key_hex):
    # self.body = []
    self.parser = HttpParser(kind=2, decompress=True)
    self.data_bytes = 0
    self.total_bytes = 0
    self.current_key_hex = current_key_hex
    self.partner_key_hex = partner_key_hex
    self.is_request = None
    self.service = None

  def getPartner(self):
    return sessions[self.partner_key_hex]

  def getService(self):
    if (self.is_request == False):
      return self.getPartner().getService()
    if (self.is_request is None):
      return '_unknown'
    if (self.service is None):
      self.service = getServiceForQS(self.parser.get_query_string())
    return self.service

  def eat(self, payload_string, bytes_sent):
    received_len = len(payload_string)
    self.data_bytes += received_len
    self.total_bytes += bytes_sent
    parsed_len = self.parser.execute(payload_string, received_len)
    # assert received_len == parsed_len

    # if self.parser.is_headers_complete():
    #   eprint(self.parser.get_headers())

    # if self.parser.is_partial_body():
    #   self.body.append(self.parser.recv_body())

    # if self.parser.is_message_complete():
    #   eprint("".join(self.body))

    if self.parser.get_status_code() != 0:
      self.is_request = False
      addBytesOutboundFromService(bytes_sent, self.getService())
      # eprint(payload_string)
    elif self.parser.is_message_begin():
      self.is_request = True
      addBytesInboundToService(bytes_sent, self.getService())
    else:
      addBytesUnknownboundToService(bytes_sent, self.getService())

    # if (self.parser.is_headers_complete() and not self.parser.is_message_complete()):
    #   eprint("expected: %s, so far: %d" % (self.parser.get_headers().get('CONTENT-LENGTH'), self.data_bytes))

    if self.parser.is_message_complete():
      eprint("end!")
Example #18
    def _handle(self, source, dest, to_backend):
        buffer_size = self.option('buffer')

        # Getting the HTTP query and sending it to the backend.
        parser = HttpParser()
        while not parser.is_message_complete():
            data = self._get_data(source, buffer_size)
            if not data:
                self._abort_handling(to_backend, dest)
                return False
            nparsed = parser.execute(data, len(data))
            assert nparsed == len(data)
            data = HOST_REPLACE.sub('\r\nHost: %s\r\n'
                                    % self.proxy.backend, data)
            dest.sendall(data)

        # Getting the HTTP response and sending it back to the source.
        parser = HttpParser()
        while not parser.is_message_complete():
            data = self._get_data(dest, buffer_size)
            if not data:
                self._abort_handling(to_backend, dest)
                return False
            nparsed = parser.execute(data, len(data))
            assert nparsed == len(data)
            source.sendall(data)

        keep_alive = parser.should_keep_alive()

        # do we close the client ?
        if not keep_alive and not self.option('keep_alive'):
            source.close()
            source._closed = True

        if not self.option('reuse_socket') and not self.option('keep_alive'):
            dest.close()
            dest._closed = True

        # we're done
        return keep_alive or self.option('keep_alive')
Example #19
    async def run(self):
        p = HttpParser(kind=0)
        while not p.is_message_complete():
            data = await self._loop.sock_recv(self._sock, 1024)
            if not data:
                break
            nparsed = p.execute(data, len(data))
            if nparsed != len(data):
                break

        if not (p.is_message_complete() and p.get_method() in ("GET", "HEAD")):
            response = (b"HTTP/1.1 400 Bad Request\r\n"
                        b"Content-Type: text/plain\r\n"
                        b"\r\n"
                        b"Bad Request\n")
            await self._loop.sock_sendall(self._sock, response)
            await self.close()
            return

        channel = p.get_path().strip("/")
        monitor = self._server.get_monitor(channel)
        if monitor is None:
            response = (b"HTTP/1.1 404 Not Found\r\n"
                        b"Content-Type: text/plain\r\n"
                        b"\r\n")
            await self._loop.sock_sendall(self._sock, response)
            await self.close()
            return

        response = b"HTTP/1.1 200 OK\r\n"
        if monitor.has_video:
            response += b"Content-Type: video/x-matroska\r\n\r\n"
        else:
            response += b"Content-Type: audio/x-matroska\r\n\r\n"
        await self._loop.sock_sendall(self._sock, response)
        if p.get_method() == "HEAD":
            await self.close()
            return
        monitor.add_fd(self._sock.fileno())
Example #20
File: http.py Project: HVF/diesel
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()

                env.update({
                    'wsgi.version' : (1,0),
                    'wsgi.url_scheme' : 'http', # XXX incomplete
                    'wsgi.input' : cStringIO.StringIO(''.join(body)),
                    'wsgi.errors' : FileLikeErrorLogger(hlog),
                    'wsgi.multithread' : False,
                    'wsgi.multiprocess' : False,
                    'wsgi.run_once' : False,
                    })
                req = Request(env)

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add('Date', utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') == None:
                    return

            except ConnectionClosed:
                break
Example #21
 def handle(self):
     p = HttpParser()
     while True:
         data = self.socket.recv(ProxyHandler.BUFF_LEN)
         self.buff += data
         size = len(data)
         p.execute(data, size)
         if p.is_message_complete():
             break
     remote_url = p.get_headers().get('Host')
     remote_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     remote_sock.connect((remote_url, 80))
     p2 = HttpParser()
     bf = ""
     remote_sock.send(self.buff)
     while True:
         data = remote_sock.recv(ProxyHandler.BUFF_LEN)
         bf += data
         s = len(data)
         p2.execute(data, s)
         if p2.is_message_complete():
             self.socket.send(bf)
             break
Example #22
def process(indir, outdir):
    findstr = os.path.join(indir, '*')
    for fn in glob.glob(findstr):
        print fn
        with open(fn, 'rb') as f:
            http_bin = f.read()

        n = 0
        while n < len(http_bin):

            http = HttpParser()
            nparsed = http.execute(http_bin[n:], len(http_bin) - n)

            if not http.is_message_complete():
                break

            if http.get_path() != '':
                # send

                http_method = http_bin[n:].split()[
                    0]  #http.get_method() -- seems bugged
                http_path = http_bin[n:].split()[1]
                http_request = parse_http_packet(http.get_headers(),
                                                 http.recv_body())
                http_hostname = 'unknown'
                if 'Host' in http.get_headers():
                    http_hostname = http.get_headers()['Host']
                print http_hostname

                nparsed -= 1

                full_http = http_method + ' ' + http_path + '\n'
                full_http += http_request + '\n'

                save_http_packet(outdir, os.path.basename(fn), http_hostname,
                                 http_path, 'send', full_http)
            else:
                # recv

                http_status = http.get_status_code()
                http_reply = parse_http_packet(http.get_headers(),
                                               http.recv_body())

                full_http += str(http_status) + '\n'
                full_http += http_reply

                save_http_packet(outdir, os.path.basename(fn), http_hostname,
                                 '', 'recv', full_http)

            n += nparsed
Example #23
    def request(self):
        request_buff = ""
        request_parser = HttpParser()
        while True:
            r_data = self.socket.recv(ProxyHandler.BUFF_LEN)
            request_buff += r_data
            r_size = len(r_data)
            request_parser.execute(r_data, r_size)
            if request_parser.is_message_complete():
                break

        host = request_parser.get_headers().get('Host')
        url, port = self._analyse_host_and_port(host)
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
Example #24
def heartbeat(sock):
    ip, port = sock.getpeername()
    parser = HttpParser()
    sock.send("GET /ping HTTP/1.1\r\nHost: %s:%d\r\n\r\n" % (ip, port))

    while True:
        data = sock.recv(1024)
        if not data:
            return False

        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        if parser.is_message_complete():
            break

    return parser.get_status_code() == 200
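heartbeat blocks forever if the peer accepts the connection but never answers. A small guard, assuming the module already imports socket, wraps the call in a temporary timeout (hypothetical helper, not from the project):

def heartbeat_with_timeout(sock, timeout=5.0):
    # Returns False on timeout instead of hanging the caller.
    previous = sock.gettimeout()
    sock.settimeout(timeout)
    try:
        return heartbeat(sock)
    except socket.timeout:
        return False
    finally:
        sock.settimeout(previous)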
Example #25
    def makeRequest(self, host, url="/", port=80, method='GET', headers=None, postdata=None):
        assert self.e is not None
        evSet = self.e.wait()  # noqa: F841
        # log.debug("Generating raw http request")
        self.s.connect((host, port))

        if headers is None:
            headers = {
                    "Accept": "*/*",
                    "User-Agent": self.useragent
            }

        req = self.rawHttpReq(host, url, method, headers, postdata)

        self.s.sendall(req.encode())

        h = []
        body = []
        p = HttpParser()
        tlen = 0

        while True:
            data = self.s.recv(2048)

            if not data:
                break

            rlen = len(data)
            tlen += rlen
            nparsed = p.execute(data, rlen)
            assert nparsed == rlen

            if p.is_headers_complete():
                h = p.get_headers()
                # log.debug(p.get_headers())
            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        self.s.close()

        res = {'status': p.get_status_code(), 'length': tlen, 'headers': h, 'body': body, 'request': req}
        print(res)
Example #26
    def receive(self):
        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            data = self.s.recv(BUFSIZE)
            if not data:
                break  # peer closed the connection before the message completed

        return Response(response=''.join(body),
                        status=h.get_status_code(),
                        headers=h.get_headers(),
                        )
Example #27
def main():

    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('install2.optimum-installer.com', 80))
        s.send(b("GET /o/PDFCreator/Express_Installer.exe.exe HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                header_done = True

            if p.is_partial_body():
                chunk = p.recv_body()
                body.append(chunk)
                print chunk
                print "BDy++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

            if p.is_message_complete():
                break

        body = b("").join(body)
        
        print "Writing file\n"
        data_write = open("mal.exe","wb") 
        data_write.write(body)
        data_write.close()
        
        print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

    finally:
        s.close()
Example #28
def do_request(conn):
    body = []
    p = HttpParser()

    while True:
        data = conn.recv(1024)
        recved = len(data)
        nparsed = p.execute(data, recved)
        assert nparsed == recved
        if not data:
            break
        if p.is_headers_complete():
            logger.debug(p.get_headers())
        if p.is_partial_body():
            logger.debug("is partial body")
            body.append(p.recv_body())
        if p.is_message_complete():
            break
    logger.debug(body)
    conn.sendall(EXAMPLE_RESPONSE)
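A minimal accept loop that drives do_request (host, port, and backlog values are assumptions; the module is assumed to import socket):

def serve(host='127.0.0.1', port=8080):
    # Accept connections one at a time and hand each to do_request.
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((host, port))
    server.listen(5)
    while True:
        conn, _addr = server.accept()
        try:
            do_request(conn)
        finally:
            conn.close()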
Example #29
 async def run(self):
     p = HttpParser()
     body = []
     try:
         while True:
             data = await self.loop.sock_recv(self.client, 1024)
             if not data:
                 break
             recved = len(data)
             nparsed = p.execute(data, recved)
             assert nparsed == recved
             if p.is_partial_body():
                 body.append(p.recv_body())
             if p.is_message_complete():
                 break
         body = json.loads(body[0])
         if not isinstance(body, list):
             body = [body]
         self.manager.messages.append((body, self.id))
     except Exception:
         self.client.close()
Example #30
def http(sock):
    while True:
        obj, conn = sock.accept()
        body = []
        p = HttpParser()
        while True:
            data = obj.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        yield response.Response(obj, p, ''.join(body), conn[0])
Example #31
    def _parse_request(self, reader, writer):
        p = HttpParser()
        body = []
        while True:
            data = yield from reader.read(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            # if p.is_headers_complete():
            #     print(p.get_headers())

            # if p.is_partial_body():
            #     body.append(p.recv_body())

            if p.is_message_complete():
                break

        request = Request(p, data)
        yield from self.handle_request((reader, writer), request)
Example #32
def findhue():  #Auto-find bridges on network & get list
    r = requests.get("https://discovery.meethue.com/")
    bridgelist = json.loads(r.text)

    if path.exists('BridgeSetup.txt'
                   ):  #DAF this will load the last saved bridge from a file
        f = open("BridgeSetup.txt", "r")
        BridgeID = f.read()
        f.close()
        verbose("last used bridge ", BridgeID)

    bridge = 0  # default to the first bridge unless one is selected below
    i = 0
    for b in bridgelist:
        if path.exists(
                'BridgeSetup.txt'
        ):  # DAF only make the comparison if the file is pressent
            if bridgelist[i]['id'] == BridgeID:
                bridge = i
                verbose("found bridge no ", str(i), " ",
                        bridgelist[i]['internalipaddress'])
        i += 1

    if len(bridgelist) > 1 and not (path.exists('BridgeSetup.txt')):
        print("Multiple bridges found. Select one of the bridges below (",
              list(bridgelist), ")")
        bridge = int(input())
    elif bridge < 1:
        bridge = 0  #Default to the only bridge if only one is found

    hueip = bridgelist[bridge][
        'internalipaddress']  #Logic currently assumes 1 bridge on the network
    print("I will use the bridge at ", hueip)

    # [DAF] save the bridge that is used to reuse the same setup next time the program is started.
    if not (path.exists('BridgeSetup.txt')
            ):  #DAF this only create the file if it's not already there
        hueid = bridgelist[bridge]['id']
        f = open("BridgeSetup.txt", "w+")
        f.write(str(hueid))
        f.close()


    msg = \
        'M-SEARCH * HTTP/1.1\r\n' \
        'HOST:' + hueip +':1900\r\n' \
        'ST:upnp:rootdevice\r\n' \
        'MX:2\r\n' \
        'MAN:"ssdp:discover"\r\n' \
        '\r\n'
    s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)
    s.settimeout(12)
    s.sendto(msg.encode('utf-8'), (hueip, 1900))
    try:
        while True:
            data, addr = s.recvfrom(65507)
            p = HttpParser()
            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved
            if p.is_headers_complete():
                headers = p.get_headers()
                if 'hue-bridgeid' in headers:
                    return addr, headers
            if p.is_message_complete():
                break
    except timeout:
        verbose('Timed out, better luck next time')
        pass
    return None
Example #33
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file =""
        self.p = HttpParser()
	self.body = []
	self.request_url = ""
	self.response_header = []
	self.header_done = False
        self.url =""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method=='CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                             'DELETE', 'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end!=-1:
                break
        #We dont wann those google.com urls.        
        if not "127.0.0.1" in self.client_buffer[:end]:
	  
	  #Insert Url into database here
          self.url = '%s'%self.client_buffer[:end]
          
          
        data = (self.client_buffer[:end+1]).split()
        self.client_buffer = self.client_buffer[end+1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER+' 200 Connection established\n'+
                         'Proxy-agent: %s\n\n'%VERSION)
        self.client_buffer = ''
        self._read_write()        

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]        
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n'%(self.method, path, self.protocol)+
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i!=-1:
            port = int(host[i+1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:
	  
	  
          (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
          self.target = socket.socket(soc_family)
          self.target.connect(address)
          
        except Exception as e:
	  address =host
	  print "Error Connecting to:"+str(address)
	  connect_ip = "Error Connecting to:"+str(address)
	  update_traffic_link(urlid,connect_ip,"Unable to Connect","Nil","")
	  # insert to db here
        #Concat data to string
        self.request_url = str(host)+" | "+str(address)+" | "+str(self.url) #debug
        #print self.request_url


    def _read_write(self):
        
        time_out_max = self.timeout/3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
		    try:
		      
		      #print " Receving Data "
                      data = in_.recv(10000)
                    except Exception as e:
		      print e
		      pass
		    
                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:
		      
		      
		      if data:
			  #column 25
			  #Dig here to analysis the traffic
			  #print data
			  try:
			    #Lets parse the data using http_parser modules
			    
			    recved = len(data)
			    #
			    #print "We received so far "+ str(recved)
			    nparsed = self.p.execute(data, recved)
			    assert nparsed == recved
			    # Check 
			    if self.p.is_headers_complete() and not self.header_done:
			      
			      #Header is an ordered dictionary 
			      header_s = self.p.get_headers()
			      
			     
			      # Lets beautify it and print it.
			      for header, value in header_s.items():
				
				#Print Response
				# Connection : close format
				res_header = header+": "+value
				self.response_header.append(res_header)
			      
			        self.header_done = True
			        #Put header to Database.
			        
			   
			    #Check if the boday is partial, if then append the body
			    if self.p.is_partial_body():
			      
			      self.body.append(self.p.recv_body())
			      #print "appending body" +self.p.recv_body()
			      #Append data body recived to a list
			      #print self.body
			      
			    # If the parsing of current request is compleated 
			    if self.p.is_message_complete():
			      
			      try:
				
				try:
				  
				  content_length = self.p.get_headers()['content-length']
			        
			        except Exception as e:
				  print "Exception in Body retrive-sub"+str(e)
				  content_length = 0
				  pass
				  
			        self.body_file = "".join(self.body)
			        body_file_type = ms.buffer(self.body_file[:1024])
			        signature_scan = ""
			        html_source =""
			        html_body=""
			        html_body = self.body_file
			        if "gzip" in body_file_type:
				  try:
				    
				    print " Decoding GZIp html\n"
				    html_body = zlib.decompress(html_body, 16+zlib.MAX_WBITS)
				    #print "source"+str(html_body)
				  except Exception as e:
				    print "Error gzip decoding:"+str(e)
				    
				  
			        
			        print urlid 
			        signature_scan_body = yara_match(html_body)
			        signature_scan_request = yara_match(self.request_url)
			        signature_scan_response =""
			        self_response = ""
			        try:
				  #This is a list convert to string and do the check
				  print self.response_header
				  self_response = ''.join(self.response_header)
				  if "Content-Disposition:" in self_response and "attachment;" in  self_response:
				    signature_scan_response = "Forced-file-download"
				    print " Signatured matched in response"
				    
				except Exception as e:
				  print e,"Error in header_match"
			        signature_scan = str(signature_scan_body) +""+str(signature_scan_request)+""+signature_scan_response
  
			        #print self.request_url
			        #print self.response_header
			        #print body_file_type
			        
			        
			        if len(signature_scan) > 6:
				  try:
				    
				    print " Signatured found and Updating\n"
				    body_file_type = "Signature_Matched: "+signature_scan+" ing "+body_file_type
				    insert_html(urlid,html_body,signature_scan)
				    html_source = html_body
				  
				  except Exception as e:
				    print "Error in Traffic Signature"+str(e)
				  
				print " Trffic Updated\n"
			        update_traffic_link(urlid,self.request_url,self.response_header,body_file_type,html_source)
				  
			        if "executable" in body_file_type:
				  print "\nExecutable found\n"
				  binary_found(urlid)
				  
				  
			      except Exception as e:
				print "Exception in Body retrive"+str(e)
				content_length = 0
				pass
			      
			      
			  except Exception as e:
			    print e
			    pass

			  #if filetype in traffice == jar,class , pdf,flash, execute
			  #save those files
			  
			  
			  out.send(data)
			  count = 0
		      
	
	            except Exception as e:
		      print e
		      pass
            if count == time_out_max:
                break
Example #34
def handle_request(sock, parser, preread):
    logging.debug("handle request")
    if parser:
        assert parser.is_headers_complete()
        headers = parser.get_headers()
        content_length = int(headers["Content-Length"]) if headers.has_key("Content-Length") else 0
        assert content_length >= len(preread)
        if content_length:
            if preread:
                nparsed = parser.execute(preread, len(preread))
                assert nparsed == len(preread)
                content_length -= len(preread)
            while content_length:
                data = sock.recv(content_length)
                if not data:
                    logging.warn("client sock closed")
                    return False
                recved = len(data)
                content_length -= recved
                nparsed = parser.execute(data, recved)
                assert nparsed == recved
                if parser.is_message_complete():
                    break
    else:
        parser = HttpParser()
        while True:
            logging.debug("recv........")
            data = sock.recv(64 * 1024)
            if not data:
                logging.warn("client sock closed")
                return False
            recved = len(data)
            nparsed = parser.execute(data, recved)
            assert nparsed == recved
            if parser.is_message_complete():
                break

    obj = None
    if parser.get_path() == "/upload":
        obj = handle_upload(sock, parser)
    elif parser.get_path() == "/sync_upload":
        obj = handle_sync_upload(sock, parser)
    elif parser.get_path() == "/download":
        obj = handle_download(sock, parser)
    elif parser.get_path() == "/sync":
        obj = handle_sync(sock, parser)
    elif parser.get_path() == "/ping":
        obj = handle_ping(sock, parser)
    elif parser.get_path() == "/info":
        obj = handle_info(sock, parser)
    else:
        logging.debug("unknown request path:%s", parser.get_path())

    keepalived = parser.should_keep_alive()
    if obj is None:
        sock.send("HTTP/1.1 404 Not Found\r\n")
        sock.send("Content-Length: 0\r\n")
        if keepalived:
            sock.send("Connection: keep-alive\r\n")
        else:
            sock.send("Connection: close\r\n")
        sock.send("\r\n")
        return False

    if not isinstance(obj, bool):
        resp = json.dumps(obj)
        keepalived = parser.should_keep_alive()
        sock.send("HTTP/1.1 200 OK\r\n")
        sock.send("Content-Type: application/json\r\n")

        sock.send("Content-Length: %d\r\n" % len(resp))
        if keepalived:
            sock.send("Connection: keep-alive\r\n")
        else:
            sock.send("Connection: close\r\n")
        sock.send("\r\n")
        sock.send(resp)
        return bool(keepalived)
    else:
        return obj
Example #35
    def handle(self):
        thd = threading.current_thread()
        # logger.debug("ThreadedTCPRequestHandler--->Handle[%r]"%(thd))
        # logger.debug(dir(thd))
        # logger.debug(self.client_address)
        # logger.debug(dir(self.server))
        # logger.debug(dir(self.request))
        # logger.debug(self.request.__class__)

        # logger.debug(self.server.socket)

        fileobj = open('/opt/Keeprapid/KRWatch/server/conf/db.conf', 'r')
        _json_dbcfg = json.load(fileobj)
        fileobj.close()
        fileobj = open("/opt/Keeprapid/KRWatch/server/conf/config.conf", "r")
        _config = json.load(fileobj)
        fileobj.close()

        self._redis = redis.StrictRedis(_json_dbcfg['redisip'],
                                        int(_json_dbcfg['redisport']),
                                        password=_json_dbcfg['redispassword'])

        queuename = "W:Queue:httpproxy"
        if _config is not None and 'httpproxy' in _config and _config[
                'httpproxy'] is not None:
            if 'Consumer_Queue_Name' in _config['httpproxy'] and _config[
                    'httpproxy']['Consumer_Queue_Name'] is not None:
                queuename = _config['httpproxy']['Consumer_Queue_Name']

        servicelist = os.listdir('./apps')
        try:
            # if 1:
            # sockobj = self._httpclientsocketqueue.get()
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            # requestdict = dict()
            # requestdict['sock'] = self.request
            # requestdict['server'] = self.server
            # requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            # requestdict['requestdatetime'] = requestdatetime
            # responsesocketdict[seqid.__str__()] = requestdict
            # logger.debug("responsesocketdict len = %d", len(responsesocketdict))
            selfqueuename = "%s:%s" % (queuename, seqid.__str__())
            logger.debug("ThreadedTCPRequestHandler::run : %s" %
                         (selfqueuename))

            while True:
                self.request.settimeout(10)
                request = self.request.recv(recv_buf_len)
                # logger.warning("request  : %s" % (request))
                recved = len(request)
                # logger.warning("recved   : %d" % (recved))
                if (recved == 0):
                    logger.warning("socket is closed by peer")
                    self.request.close()
                    return

                nparsed = p.execute(request, recved)
                # logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    self.request.sendall('HTTP/1.1 500 OK\n\n')
                    self.request.close()
                    break

                if p.is_partial_body():
                    body.append(p.recv_body())
                    # logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

            content = "".join(body)

            routekey = ""
            servicepath = ""

            # A path of the form /xxx is treated as a route key; /xxx/yyy/zzz is treated as a dest service
            request_path = p.get_path()[1:]

            # logger.warning('ThreadedTCPRequestHandler request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
            # logger.debug("content : %s" % (content))
            if content == '':
                self.request.close()
                # responsesocketdict.pop(seqid.__str__())
                return

            if request_path.find('/') == -1 and len(
                    request_path) and request_path in servicelist:

                routekey = "W:Queue:%s" % request_path
                if request_path in _config:
                    routekey = _config[request_path]['Consumer_Queue_Name']

                if len(content) == 0:
                    content_json = dict()
                else:
                    content_json = json.loads(content)

                content_json['sockid'] = seqid.__str__()
                content_json['from'] = selfqueuename
                self._redis.lpush(routekey, json.dumps(content_json))
                # enter the receiving stage
                t1 = time.time()
                while 1:
                    if self._redis.llen(selfqueuename) > 0:
                        recvdata = self._redis.rpop(selfqueuename)
                        # logger.debug("ThreadedTCPRequestHandler:%r",recvdata)
                        recvbuf = json.loads(recvdata)
                        recvbuf.pop('sockid')
                        recvbuf.pop('from')
                        self.request.sendall('HTTP/1.1 200 OK\n\n%s' %
                                             (json.dumps(recvbuf)))
                        self.request.close()
                        return
                    time.sleep(0.1)
                    t2 = time.time()
                    if t2 - t1 > 10:
                        # timed out without a response
                        logger.error(
                            "ThreadedTCPRequestHandler: Waiting...... TIMEOUT")
                        self.request.sendall('HTTP/1.1 500 OK\n\n%s' %
                                             (json.dumps(recvbuf)))
                        self.request.close()
                        return
            else:
                ret = dict()
                ret['error_code'] = '40004'
                self.request.sendall('HTTP/1.1 200 OK\n\n%s' %
                                     (json.dumps(ret)))
                #                    sockobj.shutdown(socket.SHUT_WR)
                self.request.close()
                # responsesocketdict.pop(seqid.__str__())
                return

        except Exception as e:
            logger.error("ThreadedTCPRequestHandler %s except raised : %s " %
                         (e.__class__, e.args))
            self.request.close()
            return
    def recvrawsocket2(sockobj, address):
        try:
            #        if 1:
            # logger.error(sockobj)
            #            logger.debug(dir(sockobj))
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            requestdict = dict()
            requestdict['sock'] = sockobj
            #                requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            requestdatetime = time.time()
            requestdict['requestdatetime'] = requestdatetime
            responsesocketdict[seqid.__str__()] = requestdict
            # logger.debug("responsesocketdict len = %d", len(responsesocketdict))

            while True:
                request = sockobj.recv(recv_buf_len)
                #                logger.warning("request  : %s" % (request))

                recved = len(request)
                #                logger.warning("recved   : %d" % (recved))

                if (recved == 0):
                    logger.warning("socket is closed by peer %r" % (sockobj))
                    sockobj.close()
                    break

                nparsed = p.execute(request, recved)
                #                logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    sockobj.close()
                    break

                if p.is_headers_complete():
                    request_headers = p.get_headers()
    #                        for key in request_headers:
    #                        logger.debug("headers complete %s" % (request_headers.__str__()))

    #                        logger.warning("headers complete")

                if p.is_partial_body():
                    body.append(p.recv_body())
    #                        logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

#            logger.debug(p.get_method())
#            logger.debug(p.get_path())
#            logger.debug(p.get_query_string())

            routekey = ""
            servicepath = ""

            # A path like /xxx is treated as a route key; /xxx/yyy/zzz is treated as a destination service
            request_path = p.get_path()[1:]
            request_pathlist = request_path.split('/')
            servicename = request_pathlist[0]
            action_name = ''
            servicelist = os.listdir('./apps')
            content = dict()
            if p.get_method() == 'GET':
                if servicename == 'showip':
                    sockobj.sendall("HTTP/1.1 200 OK \n\n%s" % (sockobj))
                    sockobj.shutdown(socket.SHUT_WR)
                    sockobj.close()
                    return

                if len(request_pathlist) != 2:
                    ret = dict()
                    ret['errcode'] = '40004'
                    ret['errmsg'] = _errmsg['40004']
                    sockobj.sendall('HTTP/1.1 500 OK\n\n%s' %
                                    (json.dumps(ret)))
                    sockobj.shutdown(socket.SHUT_WR)
                    sockobj.close()
                    return

                action_name = request_pathlist[1]

                querystring = p.get_query_string()
                querylist = querystring.split('&')
                action_body = dict()
                for query in querylist:
                    kvlist = query.split('=')
                    # '='.join keeps any '=' characters inside the value
                    # (a parse_qs-based alternative is sketched after this example).
                    action_body[kvlist[0]] = '='.join(kvlist[1:])
                content['action_cmd'] = action_name
                content['seq_id'] = str(random.randint(10000, 1000000))
                content['body'] = action_body
                content['version'] = '1.0'

            else:
                if len(body) > 0:
                    content = json.loads("".join(body))
#                content = "".join(body)

# logger.debug("servicename=%s,action_name=%s"%(servicename,action_name))
# logger.debug("content=%r"%(content))
            if servicename == 'testurl':
                sockobj.sendall('HTTP/1.1 200 OK\n\n%s' %
                                (content['body']['signature']))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()
                return

            if servicename in servicelist:
                routekey = "A:Queue:%s" % servicename
                if servicename in _config:
                    routekey = _config[servicename]['Consumer_Queue_Name']
                content['sockid'] = seqid.__str__()
                content['from'] = selfqueuename
                _redis.lpush(routekey, json.dumps(content))
            else:
                ret = dict()
                ret['errcode'] = '40004'
                ret['errmsg'] = _errmsg['40004']
                sockobj.sendall('HTTP/1.1 404 OK\n\n%s' % (json.dumps(ret)))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()

    #                requestdict = dict()
    #                requestdict['sock'] = sockobj
    #                requestdatetime = time.strftime(
    #                    '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
    #                requestdict['requestdatetime'] = requestdatetime
    #                responsesocketdict[seqid.__str__()] = requestdict

    # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
    #    seqid))
    # sockobj.close()

        except Exception as e:
            logger.error("recvrawsocket2 %s except raised : %s " %
                         (e.__class__, e.args))
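
The GET branch in recvrawsocket2 rebuilds the query string by hand; the standard library already does this. A minimal sketch, assuming Python 3 (on Python 2 the same function lives in urlparse), that produces the flat dict the example builds while keeping '=' characters inside values:

from urllib.parse import parse_qs

def query_to_action_body(querystring):
    # parse_qs returns {key: [value, ...]}; keep the first value per key to
    # mirror the flat action_body dict built in the example above.
    return {key: values[0] for key, values in parse_qs(querystring).items()}

# query_to_action_body('cmd=echo&sig=a=b') -> {'cmd': 'echo', 'sig': 'a=b'}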
Example #37
0
    def run(self):
        queuename = "A:Queue:httpproxy"
        if self._config is not None and 'httpproxy' in self._config and self._config['httpproxy'] is not None:
            if 'Consumer_Queue_Name' in self._config['httpproxy'] and self._config['httpproxy']['Consumer_Queue_Name'] is not None:
                queuename = self._config['httpproxy']['Consumer_Queue_Name']

        selfqueuename = "%s:%s" % (queuename, os.getpid())
        logger.debug("PublishThread::run : %s" % (selfqueuename))
        servicelist = os.listdir('./apps')
        while True:
            try:
                sockobj = self._httpclientsocketqueue.get()
                request_path = ""
                body = []
                p = HttpParser()
                seqid = uuid.uuid1()
                requestdict = dict()
                requestdict['sock'] = sockobj
#                requestdatetime = time.strftime(
#                    '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
                requestdict['requestdatetime'] = time.time()
                self._response_socket_dict[seqid.__str__()] = requestdict
#                logger.debug("responsesocketdict len = %d", len(self._response_socket_dict))

                while True:
                    request = sockobj.recv(self._recvbuflen)
#                    logger.warning("request  : %s" % (request))

                    recved = len(request)
#                    logger.warning("recved   : %d" % (recved))

                    if(recved == 0):
                        logger.warning("socket is closed by peer")
                        sockobj.close()
                        break

                    nparsed = p.execute(request, recved)
#                    logger.warning("nparsed  : %d" % (nparsed))
                    if nparsed != recved:
                        logger.warning("parse error")
                        sockobj.close()
                        break

#                    if p.is_headers_complete():
#                        request_headers = p.get_headers()
#                        for key in request_headers:
#                            logger.debug("%s: %s" % (key, request_headers[key]))

#                        logger.warning("headers complete")

                    if p.is_partial_body():
                        body.append(p.recv_body())
#                        logger.warning("body  : %s" % (body))

                    if p.is_message_complete():
#                        logger.warning("message complete")
                        break

                content = "".join(body)


                routekey = ""
                servicepath = ""

                # A path like /xxx is treated as a route key; /xxx/yyy/zzz is treated as a destination service
                request_path = p.get_path()[1:]

#                logger.warning('PublishThread request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
#                logger.debug("content : %s" % (content))

                if request_path.find('/') == -1 and len(request_path) and request_path in servicelist:

                    routekey = "A:Queue:%s" % request_path
                    if request_path in self._config:
                        routekey = self._config[request_path]['Consumer_Queue_Name']

                    if len(content) == 0:
                        content_json = dict()
                    else:
                        content_json = json.loads(content)

                    content_json['sockid'] = seqid.__str__()
                    content_json['from'] = selfqueuename
                    self._redis.lpush(routekey, json.dumps(content_json))
                else:
                    ret = dict()
                    ret['error_code'] = '40004'
                    sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
#                    sockobj.shutdown(socket.SHUT_WR)
                    sockobj.close()
                    self._response_socket_dict.pop(seqid.__str__())
                    continue



                # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
                #    seqid))
                # sockobj.close()

            except Exception as e:
                logger.error("PublishThread %s except raised : %s " % (
                    e.__class__, e.args))
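
Both handlers above use Redis lists as the transport: the parsed request body is wrapped in an envelope carrying a sockid and a per-process reply queue name, lpush'ed onto the destination service's queue, and the reply is later popped from that reply queue (the first handler polls rpop with a ten-second deadline). A condensed sketch of the same round trip with redis-py; the queue names are assumptions, it presumes a worker pushes its reply to the queue named in 'from', and brpop replaces the sleep/rpop polling loop:

import json
import os
import uuid

import redis

r = redis.Redis()
reply_queue = 'A:Queue:httpproxy:%s' % os.getpid()   # per-process reply queue (assumed name)

def dispatch(service_queue, body, timeout=10):
    envelope = dict(body)
    envelope['sockid'] = str(uuid.uuid1())
    envelope['from'] = reply_queue
    r.lpush(service_queue, json.dumps(envelope))
    popped = r.brpop(reply_queue, timeout=timeout)   # blocks instead of polling
    if popped is None:
        return None                                  # timed out, like the 500 path above
    _, raw = popped
    reply = json.loads(raw)
    reply.pop('sockid', None)
    reply.pop('from', None)
    return reply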
Example #38
0
File: proxy.py Project: BwRy/sandy
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file = ""
        self.p = HttpParser()
        self.body = []
        self.request_url = ""
        self.response_header = []
        self.header_done = False
        self.url = ""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method == 'CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                             'DELETE', 'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        #We don't want those google.com urls.
        if not "127.0.0.1" in self.client_buffer[:end]:

            #Insert Url into database here
            self.url = '%s' % self.client_buffer[:end]

        data = (self.client_buffer[:end + 1]).split()
        self.client_buffer = self.client_buffer[end + 1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER + ' 200 Connection established\n' +
                         'Proxy-agent: %s\n\n' % VERSION)
        self.client_buffer = ''
        self._read_write()

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i != -1:
            port = int(host[i + 1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:
            (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
            print "Address is ", address
            self.target = socket.socket(soc_family)
            self.target.connect(address)
        except Exception as e:
            print "Error Connecting to:" + str(host) + ":" + str(port)
            self.request_url = "Error Connecting to:" + str(host) + ":" + str(port)
            # insert to db here
            address = host  # keep the debug string below from raising NameError
        #Concat data to string
        self.request_url = str(host) + " | " + str(address) + " | " + str(self.url)  #debug
        #print self.request_url

    def _read_write(self):
        time_out_max = self.timeout / 3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    try:
                        #print " Receiving Data "
                        data = in_.recv(84333)
                    except Exception as e:
                        print e
                        pass

                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:
                        if data:
                            #column 25
                            #Dig here to analyse the traffic
                            #print data
                            try:
                                #Lets parse the data using http_parser modules
                                recved = len(data)
                                #print "We received so far " + str(recved)
                                nparsed = self.p.execute(data, recved)
                                assert nparsed == recved
                                # Check
                                if self.p.is_headers_complete() and not self.header_done:
                                    #Header is an ordered dictionary
                                    header_s = self.p.get_headers()
                                    # Lets beautify it and print it.
                                    for header, value in header_s.items():
                                        #Print Response
                                        # Connection : close format
                                        res_header = header + ": " + value
                                        self.response_header.append(res_header)
                                        self.header_done = True
                                    #Put header to Database.

                                #Check if the body is partial; if so, append it
                                if self.p.is_partial_body():
                                    self.body.append(self.p.recv_body())
                                    #print "appending body" + self.p.recv_body()
                                    #Append received body data to a list
                                    #print self.body

                                # If the parsing of the current request is completed
                                if self.p.is_message_complete():
                                    try:
                                        try:
                                            content_length = self.p.get_headers()['content-length']
                                        except Exception as e:
                                            print "Exception in Body retrieve-sub" + str(e)
                                            content_length = 0

                                        # Join the accumulated body whether or not
                                        # content-length could be read.
                                        self.body_file = "".join(self.body)
                                        body_file_type = ms.buffer(self.body_file[:1024])
                                        #print self.request_url
                                        #print self.response_header
                                        #print body_file_type
                                        print urlid
                                        update_traffic(urlid, self.request_url,
                                                       self.response_header,
                                                       body_file_type)
                                    except Exception as e:
                                        print "Exception in Body retrieve" + str(e)
                                        content_length = 0

                            except Exception as e:
                                print e
                                pass

                            #if filetype in traffic is jar, class, pdf, flash or executable,
                            #save those files

                            out.send(data)
                            count = 0

                    except Exception as e:
                        print e
                        pass
            if count == time_out_max:
                break
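
_read_write() above is a select-based tunnel: whatever arrives on one socket is forwarded to the other until traffic stops or the idle timer runs out. Stripped of the parsing and database calls, the relay itself reduces to a sketch like this (hypothetical helper, not part of the project):

import select

def relay(client, target, timeout=60.0, bufsize=8192):
    # Minimal bidirectional pump in the spirit of _read_write() above.
    pair = {client: target, target: client}
    socks = list(pair)
    idle, max_idle = 0, int(timeout / 3)
    while idle < max_idle:
        idle += 1
        readable, _, errored = select.select(socks, [], socks, 3)
        if errored:
            break
        for sock in readable:
            data = sock.recv(bufsize)
            if not data:              # peer closed its side; stop relaying
                return
            pair[sock].sendall(data)
            idle = 0                  # reset the idle counter on any traffic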
Example #39
0
class HttpProxyProtocol(asyncio.Protocol):
    ''' Implement HTTP(S) proxy behavior. '''

    def __init__(self, loop, config, token_store):
        ''' Constructor. '''

        self._parser = HttpParser()
        self._body = b''
        self._config = config
        self._loop = loop
        self._mitm = None
        self._mitm_host = None
        self._token_store = token_store

        self._instagram = InstagramApi(
            client_id=config['Instagram']['ClientID'],
            client_secret=config['Instagram']['ClientSecret'],
        )

        self._twitter = TwitterApi(
            consumer_key=config['Twitter']['ConsumerKey'],
            consumer_secret=config['Twitter']['ConsumerSecret'],
            app_token=config['Twitter']['AppToken'],
            app_token_secret=config['Twitter']['AppTokenSecret'],
        )

    def connection_made(self, transport):
        ''' Save a reference to the transport so that we can send a reply. '''
        log.debug('Connection opened.')
        self._transport = transport

    def connection_lost(self, exc):
        log.debug('Connection closed.')

    def data_received(self, data):
        ''' Parse incoming HTTP request. '''

        log.debug('Data received: {}'.format(data))
        self._parser.execute(data, len(data))

        if self._parser.is_partial_body():
            self._body += self._parser.recv_body()

        if self._parser.is_message_complete():
            method = self._parser.get_method()
            uri = self._parser.get_url()
            version = self._parser.get_version()
            headers = self._parser.get_headers()
            content_type = headers.get('Content-type', '')
            charset = _get_charset(content_type)
            body = self._body.decode(charset)

            log.debug('Client charset: {}'.format(charset))
            log.debug('Client status: method={} uri={} version={}' \
                      .format(method, uri, version))
            log.debug('Client headers: {}'.format(headers))
            log.debug('Client body: {}...'.format(body[:1000]))

            if method == 'CONNECT':
                asyncio.async(self._start_mitm(uri, version))
                self._parser = HttpParser()
            else:
                asyncio.async(
                    self._request_upstream(
                        method,
                        uri,
                        version,
                        headers,
                        body
                    )
                )


    def start_tls(self, version):
        '''
        Initiate TLS session with the client.

        This part is completely hacky! We mess around with the
        transport's internals in order to wrap the current transport in TLS.
        Python doesn't have an official way to do this, although it *might*
        get fixed in 3.6: http://bugs.python.org/issue23749
        '''

        log.debug('The proxy is starting TLS with its client.')

        status_line = 'HTTP/{}.{} {} {}\r\n\r\n' \
                      .format(version[0], version[1], 200, 'OK')
        self._transport.write(status_line.encode('ascii'))

        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        ssl_context.set_ciphers('HIGH:!aNull:!eNull')
        ssl_context.load_cert_chain('ssl/server.crt', 'ssl/server.key')

        original_socket = self._transport._sock
        self._transport = self._loop._make_ssl_transport(
            original_socket,
            self,
            ssl_context,
            server_side=True
        )

    @asyncio.coroutine
    def _request_upstream(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server. '''

        try:
            yield from self._request_upstream_helper(
                method, uri, version, headers, body
            )
        except Exception:
            charset = _get_charset(headers.get('Content-type', ''))
            response = render_http_response(
                version,
                500,
                'PROXY ERROR',
                {'Content-type': 'text/plain; charset={}'.format(charset)},
                traceback.format_exc().encode(charset)
            )
            self._transport.write(response)
            self._transport.close()
            raise

    @asyncio.coroutine
    def _request_upstream_helper(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server. '''

        log.debug('_request_upstream(): method={}, uri={}' \
                  .format(method, uri))

        if self._mitm_host:
            parsed = urlparse(uri)
            url = 'https://{}{}'.format(self._mitm_host, parsed.path)
        else:
            url = uri

        token, remaining = self._token_store.dispense(url)
        log.debug('Signing request with {} token: {}.'
                  .format(token.site, token.public))

        if 'instagram' in url:
            qp = parse_qs(parsed.query)
            qp['access_token'] = token.public

            qp['sig'] = self._instagram.oauth_sign(
                method=method,
                url=url,
                token=token,
                query_params=qp,
                body_params=parse_qs(body)
            )

            params = ['{}={}'.format(quote(k.encode('utf8')), quote(v.encode('utf8'))) for k,v in qp.items()]
            uri = '{}?{}'.format(parsed.path, '&'.join(params))
            log.debug('Signed instagram URL: {}'.format(uri))
        elif 'twitter' in url:
            headers['Authorization'] = self._twitter.oauth_sign(
                method=method,
                url=url,
                token=token.public,
                token_secret=token.secret,
                query_params=parse_qs(parsed.query),
                body_params=parse_qs(body)
            )
        else:
            raise ValueError('No signing algorithm known for URL: {}'
                             .format(url))

        if self._mitm is None:
            url = urlparse(uri)
            host = url.hostname
            port = url.port

            if port is None:
                port = 80 if url.scheme == 'http' else 443

            log.debug('Connecting to upstream (plaintext).')
            upstream = yield from asyncio.open_connection(host, port)
            upstream_reader, upstream_writer = upstream
            request = render_http_request(method, uri, version, headers, body)
            upstream_writer.write(request)

            response = b''
            parser = HttpParser()

            while True:
                if not parser.is_headers_complete():
                    data = yield from upstream_reader.readline()
                else:
                    data = yield from upstream_reader.read(
                        int(parser.get_headers()['Content-Length'])
                    )

                log.debug('Received plaintext from upstream: {}'.format(data))
                parser.execute(data, len(data))

                if parser.is_partial_body():
                    body += parser.recv_body()

                if parser.is_message_complete():
                    # Read these from the upstream response parser, not from
                    # self._parser (which holds the client's request).
                    version = parser.get_version()
                    status = parser.get_status_code()
                    reason = None # For some reason, the parser doesn't expose this :(
                    headers = parser.get_headers()

                    if status == 200:
                        self._token_store.update_rate_limit(url, headers)

                    log.debug('Plaintext upstream status: {}'.format(status))
                    log.debug('Plaintext upstream headers: {}'.format(headers))
                    log.debug('Plaintext upstream body: {}...'.format(body[:1000]))

                    response = render_http_response(
                        version, status, reason, headers, body
                    )

                    break

            upstream_writer.close()

        else:
            upstream_write = self._mitm.forward
            request = render_http_request(method, uri, version, headers, body)
            upstream_write(request)
            response = yield from self._mitm.receive()
            version, status, reason, headers, body = response

            if status == 200:
                self._token_store.update_rate_limit(token, url, headers)

            response = render_http_response(
                version, status, reason, headers, body
            )

        # Forward the upstream response to the client.
        self._transport.write(response)
        self._transport.close()

    def _set_header(self, key, value):
        ''' Set a header value. '''

        key = key.strip().upper()
        value = value.strip()
        self._headers[key] = value

    @asyncio.coroutine
    def _start_mitm(self, uri, version):
        ''' MITM a connection to the upstream server. '''

        log.debug('The proxy is starting an MITM connection.')
        host, port = uri.split(':')
        port = int(port)
        self._mitm_host = host

        _, self._mitm = yield from self._loop.create_connection(
            lambda: MitmProtocol(self._loop, version, self),
            host,
            port,
            ssl = ssl.create_default_context()
        )
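
data_received() and _request_upstream() above both rely on a _get_charset() helper that is not shown on this page. One plausible implementation, offered only as a sketch of what such a helper might do:

def _get_charset(content_type, default='utf-8'):
    # Pull the charset parameter out of a header such as
    # "text/html; charset=ISO-8859-1"; fall back to a default otherwise.
    for part in (content_type or '').split(';')[1:]:
        name, _, value = part.strip().partition('=')
        if name.lower() == 'charset' and value:
            return value.strip('"\'').lower()
    return default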
Example #40
0
class MitmProtocol(asyncio.Protocol):
    ''' Handles details of MITMing a TLS connection. '''

    def __init__(self, loop, http_version, proxy):
        ''' Constructor. '''

        self._http_version = http_version
        self._loop = loop
        self._parser = HttpParser()
        self._proxy = proxy
        self._received = asyncio.Future()
        self._body = b''

    def connection_made(self, transport):
        ''' Save a reference to the transport. '''

        log.debug('MITM connection opened.')
        self._transport = transport
        cert = self._transport.get_extra_info('peercert')
        log.debug('MITM upstream certificate: {}'.format(cert))
        self._loop.call_soon(self._proxy.start_tls, self._http_version)

    def connection_lost(self, exc):
        log.debug('MITM connection closed.')
        self._received.cancel()

    def data_received(self, data):
        ''' Accumulate request data. '''

        log.debug('MITM data received: {}'.format(data))
        self._parser.execute(data, len(data))

        if self._parser.is_partial_body():
            self._body += self._parser.recv_body()

        if self._parser.is_message_complete():
            version = self._parser.get_version()
            status = self._parser.get_status_code()
            reason = None # For some reason, the parser doesn't expose this :(
            headers = self._parser.get_headers()

            log.debug('MITM upstream status: {}'.format(status))
            log.debug('MITM upstream headers: {}'.format(headers))
            log.debug('MITM upstream body: {}...'.format(self._body[:1000]))

            self._received.set_result(
                (version, status, reason, headers, self._body)
            )
            self._transport.close()

    def forward(self, data):
        ''' Forward data to upstream host. '''

        log.debug('MITM sending data: {}'.format(data))
        self._transport.write(data)

    @asyncio.coroutine
    def receive(self):
        ''' Read data received by this MITM instance. '''

        response = yield from self._received
        return response
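
MitmProtocol hands the parsed upstream response back to the proxy coroutine through an asyncio.Future: data_received() calls set_result() once the message is complete, and receive() simply waits on that future. The same hand-off pattern in isolation (hypothetical class, same pre-async/await style as the example):

import asyncio

class OneShotReceiver(asyncio.Protocol):
    def __init__(self):
        self._received = asyncio.Future()

    def data_received(self, data):
        if not self._received.done():
            self._received.set_result(data)   # hand the payload to the waiting coroutine

    def connection_lost(self, exc):
        if not self._received.done():
            self._received.cancel()           # mirrors MitmProtocol.connection_lost

    @asyncio.coroutine
    def receive(self):
        payload = yield from self._received   # resolves when data_received() fires
        return payload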
Example #41
0
    def __call__(self, addr):
        """Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        """
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if "HTTP_CONTENT_LENGTH" in env:
                    env["CONTENT_LENGTH"] = env.pop("HTTP_CONTENT_LENGTH")
                if "HTTP_CONTENT_TYPE" in env:
                    env["CONTENT_TYPE"] = env.pop("HTTP_CONTENT_TYPE")

                env.update(
                    {
                        "wsgi.version": (1, 0),
                        "wsgi.url_scheme": "http",  # XXX incomplete
                        "wsgi.input": cStringIO.StringIO("".join(body)),
                        "wsgi.errors": FileLikeErrorLogger(hlog),
                        "wsgi.multithread": False,
                        "wsgi.multiprocess": False,
                        "wsgi.run_once": False,
                        "REMOTE_ADDR": addr[0],
                        "SERVER_NAME": HOSTNAME,
                        "SERVER_PORT": str(self.port),
                    }
                )
                req = Request(env)
                if req.headers.get("Connection", "").lower() == "upgrade":
                    req.data = data

                resp = self.request_handler(req)
                if "Server" not in resp.headers:
                    resp.headers.add("Server", SERVER_TAG)
                if "Date" not in resp.headers:
                    resp.headers.add("Date", utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (
                    (not h.should_keep_alive())
                    or resp.headers.get("Connection", "").lower() == "close"
                    or resp.headers.get("Content-Length") == None
                ):
                    return

            except ConnectionClosed:
                break
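
The environ fix-ups above matter because PEP 3333 expects CONTENT_LENGTH and CONTENT_TYPE without the HTTP_ prefix and expects the request body on wsgi.input. A throwaway WSGI callable that consumes an environ built this way (illustrative only, not part of the diesel example):

def echo_app(environ, start_response):
    # CONTENT_LENGTH / CONTENT_TYPE are the un-prefixed keys restored above.
    length = int(environ.get('CONTENT_LENGTH') or 0)
    body = environ['wsgi.input'].read(length) if length else ''
    start_response('200 OK',
                   [('Content-Type', environ.get('CONTENT_TYPE', 'text/plain'))])
    return [body]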
Example #42
0
class HTTPSession(base_object.BaseObject):

	_http_header = ""
	_method = ""
	_version = ""
	_req_obj = ""
	_user_agent = "User-Agent: COS-598C-Project-Client\r\n"
	_accept = "Accept: */*\r\n"
	_accept_enc = "Accept-Encoding: *\r\n"
	_accept_charset = "Accept-Charset: *\r\n"
	_host = ""
	_writer = ""
	_closeable = False
	_http_parser = ""
	_nr_bytes = 0
	
	def __init__ (self, method, req_obj, version):
		self._method = method
		self._req_obj = req_obj
		self._version = version
		self._http_parser = HttpParser()

	def _build_first_line(self):
		first_line = self._method+" "+self._req_obj+" "+self._version+"\r\n"
		return first_line
	
	def set_host(self, host):
		self._host = "Host: "+host+"\r\n"	

	def set_writer(self, writer):
		self._writer = writer

	def write_response(self, data):
		recved = len(data)
		nparsed = self._http_parser.execute(data, recved)
		assert nparsed == recved
		self._nr_bytes += recved	
		if self._http_parser.is_partial_body():
			self._writer.write(str(self._http_parser.recv_body()))

		if self._http_parser.is_message_complete():
			self._closeable = True
		return self._nr_bytes
		
	def get_response_headers(self):
		if self._http_parser.is_headers_complete():
			return self._http_parser.get_headers()

	def closeable(self):
		return self._closeable

	def set_port(self, port):
		return

	def get_request(self):
		self._http_header = self._build_first_line()+\
					self._host+\
					self._user_agent+\
					self._accept+\
					self._accept_enc+\
					self._accept_charset+\
					"\r\n"
		return self._http_header
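
A usage sketch for the class above (the host, resource and writer are made up): build the request line and headers with get_request(), send them over a socket, then feed each received chunk through write_response() until closeable() reports the message is complete.

import sys

session = HTTPSession("GET", "/index.html", "HTTP/1.1")
session.set_host("example.com")
session.set_writer(sys.stdout)          # any object with a write() method

raw_request = session.get_request()     # request line + headers, CRLF terminated
# send raw_request over a connected socket, then for every chunk received:
#     session.write_response(chunk)
# until session.closeable() is True; at that point
# session.get_response_headers() returns the parsed response headers.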
Example #43
0
    print "http res"
    rsp = ""
    with open("../testing/response.http", "r") as f:
        rsp = f.readlines()
        rsp = "".join(rsp)
        rsp = rsp[:-2]
        print "buffer :"
        print rsp
    print "parsing ..."
    p = HttpParser()
    recved = len(rsp)
    nparsed = p.execute(rsp, recved)
    assert nparsed == recved

    if p.is_message_complete():
        print "message complete"

    print "--------------------"

    rsp_1 = ""
    with open("../testing/test1_response_part1.http", "r") as f:
        rsp_1 = f.readlines()
        rsp_1 = "".join(rsp_1)
        rsp_1 = rsp_1[:-2]
    rsp_2 = ""
    with open("../testing/test1_response_part2.http", "r") as f:
        rsp_2 = f.readlines()
        rsp_2 = "".join(rsp_2)
        rsp_2 = rsp_2[:-2]
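
    The snippet stops after loading the two fragments. The natural continuation (a sketch, assuming the two files split a single response) is to feed them through one parser and check that the message only completes after the second chunk, matching the style of the test above:

    p = HttpParser()

    nparsed = p.execute(rsp_1, len(rsp_1))
    assert nparsed == len(rsp_1)
    print "after part 1, complete:", p.is_message_complete()   # expected: False

    nparsed = p.execute(rsp_2, len(rsp_2))
    assert nparsed == len(rsp_2)
    print "after part 2, complete:", p.is_message_complete()   # expected: True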
Example #44
0
    def __call__(self, addr):
        '''Since an instance of HttpServer is passed to the Service
        class (with appropriate request_handler established during
        initialization), this __call__ method is what's actually
        invoked by diesel.
        '''
        data = None
        while True:
            try:
                h = HttpParser()
                body = []
                while True:
                    if data:
                        used = h.execute(data, len(data))
                        if h.is_headers_complete():
                            body.append(h.recv_body())
                        if h.is_message_complete():
                            data = data[used:]
                            break
                    data = receive()

                env = h.get_wsgi_environ()
                if 'HTTP_CONTENT_LENGTH' in env:
                    env['CONTENT_LENGTH'] = env.pop("HTTP_CONTENT_LENGTH")
                if 'HTTP_CONTENT_TYPE' in env:
                    env['CONTENT_TYPE'] = env.pop("HTTP_CONTENT_TYPE")

                env.update({
                    'wsgi.version' : (1,0),
                    'wsgi.url_scheme' : 'http', # XXX incomplete
                    'wsgi.input' : cStringIO.StringIO(''.join(body)),
                    'wsgi.errors' : FileLikeErrorLogger(hlog),
                    'wsgi.multithread' : False,
                    'wsgi.multiprocess' : False,
                    'wsgi.run_once' : False,
                    'REMOTE_ADDR' : addr[0],
                    'SERVER_NAME' : HOSTNAME,
                    'SERVER_PORT': str(self.port),
                    })
                req = Request(env)
                if req.headers.get('Connection', '').lower() == 'upgrade':
                    req.data = data

                resp = self.request_handler(req)
                if 'Server' not in resp.headers:
                    resp.headers.add('Server', SERVER_TAG)
                if 'Date' not in resp.headers:
                    resp.headers.add('Date', utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))

                assert resp, "HTTP request handler _must_ return a response"

                self.send_response(resp, version=h.get_version())

                if (not h.should_keep_alive()) or \
                    resp.headers.get('Connection', '').lower() == "close" or \
                    resp.headers.get('Content-Length') == None:
                    return

                # Switching Protocols
                if resp.status_code == 101 and hasattr(resp, 'new_protocol'):
                    resp.new_protocol(req)
                    break

            except ConnectionClosed:
                break
Example #45
0
    def _request_upstream_helper(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server. '''

        log.debug('_request_upstream(): method={}, uri={}' \
                  .format(method, uri))

        if self._mitm_host:
            parsed = urlparse(uri)
            url = 'https://{}{}'.format(self._mitm_host, parsed.path)
        else:
            url = uri

        token, remaining = self._token_store.dispense(url)
        log.debug('Signing request with {} token: {}.'
                  .format(token.site, token.public))

        if 'instagram' in url:
            qp = parse_qs(parsed.query)
            qp['access_token'] = token.public

            qp['sig'] = self._instagram.oauth_sign(
                method=method,
                url=url,
                token=token,
                query_params=qp,
                body_params=parse_qs(body)
            )

            params = ['{}={}'.format(quote(k.encode('utf8')), quote(v.encode('utf8'))) for k,v in qp.items()]
            uri = '{}?{}'.format(parsed.path, '&'.join(params))
            log.debug('Signed instagram URL: {}'.format(uri))
        elif 'twitter' in url:
            headers['Authorization'] = self._twitter.oauth_sign(
                method=method,
                url=url,
                token=token.public,
                token_secret=token.secret,
                query_params=parse_qs(parsed.query),
                body_params=parse_qs(body)
            )
        else:
            raise ValueError('No signing algorithm known for URL: {}'
                             .format(url))

        if self._mitm is None:
            url = urlparse(uri)
            host = url.hostname
            port = url.port

            if port is None:
                port = 80 if url.scheme == 'http' else 443

            log.debug('Connecting to upstream (plaintext).')
            upstream = yield from asyncio.open_connection(host, port)
            upstream_reader, upstream_writer = upstream
            request = render_http_request(method, uri, version, headers, body)
            upstream_writer.write(request)

            response = b''
            parser = HttpParser()

            while True:
                if not parser.is_headers_complete():
                    data = yield from upstream_reader.readline()
                else:
                    data = yield from upstream_reader.read(
                        int(parser.get_headers()['Content-Length'])
                    )

                log.debug('Received plaintext from upstream: {}'.format(data))
                parser.execute(data, len(data))

                if parser.is_partial_body():
                    body += parser.recv_body()

                if parser.is_message_complete():
                    # Read these from the upstream response parser, not from
                    # self._parser (which holds the client's request).
                    version = parser.get_version()
                    status = parser.get_status_code()
                    reason = None # For some reason, the parser doesn't expose this :(
                    headers = parser.get_headers()

                    if status == 200:
                        self._token_store.update_rate_limit(url, headers)

                    log.debug('Plaintext upstream status: {}'.format(status))
                    log.debug('Plaintext upstream headers: {}'.format(headers))
                    log.debug('Plaintext upstream body: {}...'.format(body[:1000]))

                    response = render_http_response(
                        version, status, reason, headers, body
                    )

                    break

            upstream_writer.close()

        else:
            upstream_write = self._mitm.forward
            request = render_http_request(method, uri, version, headers, body)
            upstream_write(request)
            response = yield from self._mitm.receive()
            version, status, reason, headers, body = response

            if status == 200:
                self._token_store.update_rate_limit(token, url, headers)

            response = render_http_response(
                version, status, reason, headers, body
            )

        # Forward the upstream response to the client.
        self._transport.write(response)
        self._transport.close()
Example #46
0
 def handle_read(self):
     try:
         logging.debug('Connection.handle_read - id=%d' % self.id)
         b = self.sock.recv(2048)
         logging.debug(
             'Connection.handle_read - received buffer size is %d bytes' %
             len(b))
         logging.debug(
             'Connection.handle_read - received buffer is : \n%s' % b)
         if not len(b):
             logging.debug(
                 'Connection.handle_read - 0 bytes received on %d. closing'
                 % self.id)
             self.close()
             return
         self.read_buf += b
     except socket.error as err:
         if err.args[0] not in NONBLOCKING:
             # `args` is undefined here; the error details live on `err`.
             self.handle_error('%s' % err.args[1])
         else:
             logging.error(
                 'Connection.handle_read - NONBLOCKING event on read : %s' %
                 err.args[1])
     else:
         # check if we have a full http request
         parser = HttpParser()
         recved = len(self.read_buf)
         nparsed = parser.execute(self.read_buf, recved)
         assert nparsed == recved
         if not parser.is_message_complete():
             # we got a partial request keep on reading
             logging.debug(
                 'Connection.handle_read - partial buffer received : \n%s' %
                 self.read_buf)
             self.reset(pyev.EV_READ)
         else:
             # we got a full request
             self.read_buf = ''
             # match the verb with URI and call
             # after that register for write to send response
             verb = parser.get_method()
             url = parser.get_path()
             logging.debug(
                 'Connection.handle_read - id  %d - method is %s and url %s'
                 % (self.id, verb, url))
             call, keyword_args = register.get_callable(url, verb)
             if not call:
                 err = HttpResponse()
                 err.status_code = 404
                 err.status_string = 'Not Found'
                 err.headers['Content-Type'] = 'application/txt'
                 err.body = 'URI Not Found\r\n'
                 self.write_buf = err.to_string()
             else:
                 keyword_args['http_request'] = parser
                 logging.debug('Connection.handle_read - kargs=%s' %
                               keyword_args)
                 try:
                     response = call(*[
                         register.handler,
                     ], **keyword_args)
                     self.write_buf = response.to_string()
                 except:
                     err = HttpResponse()
                     err.status_code = 500
                     err.status_string = 'Internal Server Error'
                     err.headers['Content-Type'] = 'application/txt'
                     err.body = 'Upsssss.\r\n'
                     self.write_buf = err.to_string()
             logging.debug('Connection.handle_read - requesting write %d' %
                           self.id)
             self.reset(pyev.EV_WRITE)
Example #47
0
 def handle_read(self):
     try:
         logging.debug('Connection.handle_read - id=%d' % self.id)
         b = self.sock.recv(2048)
         logging.debug('Connection.handle_read - received buffer size is %d bytes' % len(b))
         logging.debug('Connection.handle_read - received buffer is : \n%s' % b)
         if not len(b):
             logging.debug('Connection.handle_read - 0 bytes received on %d. closing' %
                           self.id)
             self.close()
             return
         self.read_buf += b
     except socket.error as err:
         if err.args[0] not in NONBLOCKING:
             # `args` is undefined here; use the exception's own args.
             self.handle_error('%s' % err.args[1])
         else :
             logging.error('Connection.handle_read - NONBLOCKING event on read : %s' % err.args[1])
     else:
         # check if we have a full http request
         parser = HttpParser()
         recved = len(self.read_buf)
         nparsed = parser.execute(self.read_buf, recved)
         assert nparsed == recved
         if not parser.is_message_complete():
             # we got a partial request keep on reading
             logging.debug(
                 'Connection.handle_read - partial buffer received : \n%s' % 
                  self.read_buf)
             self.reset(pyev.EV_READ)
         else :
             # we got a full request
             self.read_buf = ''
             # match the verb with URI and call
             # after that register for write to send response
             verb = parser.get_method()
             url = parser.get_path()
             logging.debug('Connection.handle_read - id  %d - method is %s and url %s' % 
                 (self.id, verb, url))
             call, keyword_args = register.get_callable(url, verb)
             if not call :
                 err = HttpResponse()
                 err.status_code = 404
                 err.status_string = 'Not Found'
                 err.headers['Content-Type'] = 'application/txt'                    
                 err.body = 'URI Not Found\r\n'
                 self.write_buf = err.to_string()
             else :        
                 keyword_args['http_request'] = parser
                 logging.debug('Connection.handle_read - kargs=%s' % keyword_args)
                 try :
                     response = call(*[register.handler,], **keyword_args)
                     self.write_buf = response.to_string()
                 except :
                     err = HttpResponse()
                     err.status_code = 500
                     err.status_string = 'Internal Server Error'
                     err.headers['Content-Type'] = 'application/txt'                    
                     err.body = 'Upsssss.\r\n'
                     self.write_buf = err.to_string()
             logging.debug('Connection.handle_read - requesting write %d' % self.id)
             self.reset(pyev.EV_WRITE)
Example #48
0
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file = ""
        self.p = HttpParser()
        self.body = []
        self.request_url = ""
        self.response_header = []
        self.header_done = False
        self.url = ""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method == 'CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
                             'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        #We don't want those google.com urls.
        if not "127.0.0.1" in self.client_buffer[:end]:

            #Insert Url into database here
            self.url = '%s' % self.client_buffer[:end]

        data = (self.client_buffer[:end + 1]).split()
        self.client_buffer = self.client_buffer[end + 1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER + ' 200 Connection established\n' +
                         'Proxy-agent: %s\n\n' % VERSION)
        self.client_buffer = ''
        self._read_write()

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i != -1:
            port = int(host[i + 1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:

            (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
            print "Adress is ", address
            self.target = socket.socket(soc_family)
            self.target.connect(address)

        except Exception as e:
            print "Error Connecting to:" + str(host) + ":" + str(port)
            self.request_url = "Error Connecting to:" + str(host) + ":" + str(
                port)
            # insert to db here
            #Concat data to string
        self.request_url = str(host) + " | " + str(address) + " | " + str(
            self.url)  #debug
        #print self.request_url

    def _read_write(self):

        time_out_max = self.timeout / 3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    try:

                        #print " Receving Data "
                        data = in_.recv(84333)
                    except Exception as e:
                        print e
                        pass

                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:

                        if data:
                            #column 25
                            #Dig here to analyse the traffic
                            #print data
                            try:
                                #Lets parse the data using http_parser modules

                                recved = len(data)
                                #
                                #print "We received so far "+ str(recved)
                                nparsed = self.p.execute(data, recved)
                                assert nparsed == recved
                                # Check
                                if self.p.is_headers_complete(
                                ) and not self.header_done:

                                    #Header is an ordered dictionary
                                    header_s = self.p.get_headers()

                                    # Lets beautify it and print it.
                                    for header, value in header_s.items():

                                        #Print Response
                                        # Connection : close format
                                        res_header = header + ": " + value
                                        self.response_header.append(res_header)

                                        self.header_done = True
                                    #Put header to Database.

                                #Check if the body is partial; if so, append it
                                if self.p.is_partial_body():

                                    self.body.append(self.p.recv_body())
                                    #print "appending body" +self.p.recv_body()
                                    #Append received body data to a list
                                    #print self.body

                                # If the parsing of the current request is completed
                                if self.p.is_message_complete():

                                    try:

                                        try:

                                            content_length = self.p.get_headers(
                                            )['content-length']

                                        except Exception as e:
                                            print "Exception in Body retrieve-sub" + str(
                                                e)
                                            content_length = 0

                                        # Join the accumulated body whether or not
                                        # content-length could be read.
                                        self.body_file = "".join(self.body)
                                        body_file_type = ms.buffer(
                                            self.body_file[:1024])
                                        #print self.request_url
                                        #print self.response_header
                                        #print body_file_type
                                        print urlid
                                        update_traffic(urlid, self.request_url,
                                                       self.response_header,
                                                       body_file_type)
                                    except Exception as e:
                                        print "Exception in Body retrive" + str(
                                            e)
                                        content_length = 0
                                        pass

                            except Exception as e:
                                print e
                                pass

                            #if filetype in traffic is jar, class, pdf, flash or executable,
                            #save those files

                            out.send(data)
                            count = 0

                    except Exception as e:
                        print e
                        pass
            if count == time_out_max:
                break
Example #49
0
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file = ""
        self.p = HttpParser()
        self.body = []
        self.request_url = ""
        self.response_header = []
        self.header_done = False
        self.url = ""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method == 'CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
                             'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        #We don't want those google.com urls.
        if not "127.0.0.1" in self.client_buffer[:end]:

            #Insert Url into database here
            self.url = '%s' % self.client_buffer[:end]

        data = (self.client_buffer[:end + 1]).split()
        self.client_buffer = self.client_buffer[end + 1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER + ' 200 Connection established\n' +
                         'Proxy-agent: %s\n\n' % VERSION)
        self.client_buffer = ''
        self._read_write()

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i != -1:
            port = int(host[i + 1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:

            (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
            self.target = socket.socket(soc_family)
            self.target.connect(address)

        except Exception as e:
            address = host
            print "Error Connecting to:" + str(address)
            connect_ip = "Error Connecting to:" + str(address)
            update_traffic_link(urlid, connect_ip, "Unable to Connect", "Nil",
                                "")
            # insert to db here
            #Concat data to string
        self.request_url = str(host) + " | " + str(address) + " | " + str(
            self.url)  #debug
        #print self.request_url

    def _read_write(self):

        time_out_max = self.timeout / 3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    try:

                        #print " Receving Data "
                        data = in_.recv(10000)
                    except Exception as e:
                        print e
                        pass

                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:

                        if data:
                            #column 25
                            #Dig here to analyse the traffic
                            #print data
                            try:
                                #Lets parse the data using http_parser modules

                                recved = len(data)
                                #
                                #print "We received so far "+ str(recved)
                                nparsed = self.p.execute(data, recved)
                                assert nparsed == recved
                                # Check
                                if self.p.is_headers_complete(
                                ) and not self.header_done:

                                    #Header is an ordered dictionary
                                    header_s = self.p.get_headers()

                                    # Lets beautify it and print it.
                                    for header, value in header_s.items():

                                        #Print Response
                                        # Connection : close format
                                        res_header = header + ": " + value
                                        self.response_header.append(res_header)

                                        self.header_done = True
                                    #Put header to Database.

                                #Check if the body is partial; if so, append it
                                if self.p.is_partial_body():

                                    self.body.append(self.p.recv_body())
                                    #print "appending body" +self.p.recv_body()
                                    #Append received body data to a list
                                    #print self.body

                                # If the parsing of the current request is completed
                                if self.p.is_message_complete():

                                    try:

                                        try:

                                            content_length = self.p.get_headers(
                                            )['content-length']

                                        except Exception as e:
                                            print "Exception in Body retrive-sub" + str(
                                                e)
                                            content_length = 0
                                            pass

                                        self.body_file = "".join(self.body)
                                        body_file_type = ms.buffer(
                                            self.body_file[:1024])
                                        signature_scan = ""
                                        html_source = ""
                                        html_body = ""
                                        html_body = self.body_file
                                        if "gzip" in body_file_type:
                                            try:

                                                print " Decoding GZIp html\n"
                                                html_body = zlib.decompress(
                                                    html_body,
                                                    16 + zlib.MAX_WBITS)
                                                #print "source"+str(html_body)
                                            except Exception as e:
                                                print "Error gzip decoding:" + str(
                                                    e)

                                        print urlid
                                        signature_scan_body = yara_match(
                                            html_body)
                                        signature_scan_request = yara_match(
                                            self.request_url)
                                        signature_scan_response = ""
                                        self_response = ""
                                        try:
                                            #This is a list convert to string and do the check
                                            print self.response_header
                                            self_response = ''.join(
                                                self.response_header)
                                            if "Content-Disposition:" in self_response and "attachment;" in self_response:
                                                signature_scan_response = "Forced-file-download"
                                                print " Signatured matched in response"

                                        except Exception as e:
                                            print e, "Error in header_match"
                                        signature_scan = str(
                                            signature_scan_body) + "" + str(
                                                signature_scan_request
                                            ) + "" + signature_scan_response

                                        #print self.request_url
                                        #print self.response_header
                                        #print body_file_type

                                        if len(signature_scan) > 6:
                                            try:

                                                print " Signatured found and Updating\n"
                                                body_file_type = "Signature_Matched: " + signature_scan + " ing " + body_file_type
                                                insert_html(
                                                    urlid, html_body,
                                                    signature_scan)
                                                html_source = html_body

                                            except Exception as e:
                                                print "Error in Traffic Signature" + str(
                                                    e)

                                        print " Trffic Updated\n"
                                        update_traffic_link(
                                            urlid, self.request_url,
                                            self.response_header,
                                            body_file_type, html_source)

                                        if "executable" in body_file_type:
                                            print "\nExecutable found\n"
                                            binary_found(urlid)

                                    except Exception as e:
                                        print "Exception in Body retrive" + str(
                                            e)
                                        content_length = 0
                                        pass

                            except Exception as e:
                                print e
                                pass

                            # If the file type seen in the traffic is jar, class, pdf, flash or executable,
                            # save those files

                            out.send(data)
                            count = 0

                    except Exception as e:
                        print e
                        pass
            if count == time_out_max:
                break
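The loop above interleaves raw proxying with on-the-fly analysis: every chunk is fed to an HttpParser, headers and body are accumulated, and a gzip body is inflated with zlib before being scanned. Below is a condensed, self-contained Python 3 sketch of just that analysis path; the scan_payload hook is hypothetical and stands in for the yara_match/insert_html/update_traffic_link helpers used above, and the pure-Python parser from the http-parser package is assumed.

import zlib

from http_parser.pyparser import HttpParser


def analyse_http_message(chunks, scan_payload=None):
    """Parse one HTTP message from an iterable of byte chunks.

    Returns (headers, body); the body is inflated if it looks gzip-compressed.
    scan_payload is a hypothetical hook standing in for the signature
    scanning done by the proxy above.
    """
    parser = HttpParser()
    body_parts = []
    for chunk in chunks:
        nparsed = parser.execute(chunk, len(chunk))
        assert nparsed == len(chunk)
        if parser.is_partial_body():
            body_parts.append(parser.recv_body())
        if parser.is_message_complete():
            break

    headers = parser.get_headers() if parser.is_headers_complete() else {}
    body = b"".join(body_parts)

    # Same trick as above: a gzip stream is inflated with 16 + zlib.MAX_WBITS.
    if body[:2] == b"\x1f\x8b":
        try:
            body = zlib.decompress(body, 16 + zlib.MAX_WBITS)
        except zlib.error:
            pass  # keep the raw body if inflation fails

    if scan_payload is not None:
        scan_payload(headers, body)
    return headers, body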
Example #50
0
class QHttpConnection(QObject):
    newRequest = pyqtSignal(QHttpRequest, QHttpResponse)
    disconnected = pyqtSignal()
    
    def __init__(self, sock, parent = None):
        super(QHttpConnection, self).__init__(parent)

        self.m_sock = sock
        self.m_body = []
        self.m_parser = HttpParser()

        self.m_request = QHttpRequest(self)
        self.m_request = None
        self.m_response = QHttpResponse(self)
        self.m_response = None
        
        self.m_sock.readyRead.connect(self._onReadyRead)
        self.m_sock.disconnected.connect(self._onDisconnected)
        self.m_sock.bytesWritten.connect(self._onBytesWritten)
        
        return
    
    def write(self, data):
        self.m_sock.write(data)
        return

    def _onReadyRead(self):
        #qDebug('hehe')
        qtdata = self.m_sock.readAll()
        pydata = qtdata.data()
        np = self.m_parser.execute(pydata, len(pydata))
        qDebug(str(np) + '=?' + str(len(pydata)))
        #qDebug(qtdata)
        #qDebug(qtdata.toHex())
        #print(self.m_parser._body)
        #print(self.m_parser._body)

        #qDebug(str(self.m_parser.is_message_begin()))
        #qDebug(str(self.m_parser.is_message_complete()))
        #qDebug(str(self.m_parser.is_headers_complete()))

        if self.m_parser.is_headers_complete():
            if self.m_request is not None:
                qWarning('already have a request object')
            else:
                self.m_request = QHttpRequest(self)
                _qogc.add(self.m_request)
                # qDebug(str(self.m_request))
                # print(self.m_parser.get_headers())
            True

        ### body area
        # qDebug(str(self.m_parser.is_message_begin()))
        # unused lines; this probably refers to the flag used on the client side to mark that the data download has completed.
        if self.m_parser.is_message_begin() and self.m_request != None:
            qDebug('body coming...')
            self.m_request.hasBody()
            
        mth = self.m_parser.get_method()
        # qDebug(mth)
            
        if mth == 'GET':
            if self.m_parser.is_headers_complete():
                self.m_response = QHttpResponse(self)
                self.m_response.done.connect(self._onResponseDone)
                _qogc.add(self.m_response)

                self.newRequest.emit(self.m_request, self.m_response)
            pass
        elif mth == 'POST':
            if self.m_parser.is_partial_body(): self.m_body.append(self.m_parser.recv_body())
            if self.m_parser.is_message_complete(): print(b''.join(self.m_body))
        elif mth == 'CONNECT':
            if self.m_parser.is_headers_complete():
                if self.m_response is not None:
                    qWarning('already have a response object')
                else:
                    self.m_response = QHttpResponse(self)
                    self.m_response.done.connect(self._onResponseDone)
                    _qogc.add(self.m_response)

                    self.newRequest.emit(self.m_request, self.m_response)
            else:
                qDebug('hdr not complete')
            True
        else:
            qWarning("not impled method:" + mth)
            self.m_sock.close()
        
        return

    def _onDisconnected(self):
        # qDebug('hehe')
        self.disconnected.emit()
        return

    def _onBytesWritten(self, count):
        # qDebug('hehe')
        return

    def _onResponseDone(self):
        self.m_sock.disconnectFromHost()
        self.m_sock.close()
        # qDebug(str(self.m_request))
        return

    def close(self):
        self.m_sock.flush()
        self.m_sock.close()
        return

    def last(self): return
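QHttpConnection above only wraps a socket it is handed; something still has to accept connections and react to newRequest. Below is a minimal wiring sketch, assuming PyQt5 and assuming QHttpRequest/QHttpResponse come from the same (unshown) module as the class above; the QHttpResponse API is not shown, so the handler's response.write() call is purely illustrative.

import sys

from PyQt5.QtCore import QCoreApplication
from PyQt5.QtNetwork import QHostAddress, QTcpServer

# QHttpConnection, QHttpRequest and QHttpResponse are assumed to be importable
# from the same module that defines the class above.


class QHttpServer(QTcpServer):
    """Accept TCP connections and wrap each one in a QHttpConnection."""

    def __init__(self, handler, parent=None):
        super(QHttpServer, self).__init__(parent)
        self._handler = handler
        self._connections = []
        self.newConnection.connect(self._onNewConnection)

    def _onNewConnection(self):
        sock = self.nextPendingConnection()
        conn = QHttpConnection(sock, self)
        conn.newRequest.connect(self._handler)
        self._connections.append(conn)      # keep a reference alive


def handle(request, response):
    # The QHttpResponse API is not shown above; response.write() is assumed
    # here purely for illustration.
    response.write(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nok")


if __name__ == "__main__":
    app = QCoreApplication(sys.argv)
    server = QHttpServer(handle)
    server.listen(QHostAddress.LocalHost, 8080)
    sys.exit(app.exec_())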
Example #51
0
File: http.py Project: bjornua/dna
class HttpStream(object):
    """ An HTTP parser providing higher-level access to a readable,
    sequential io.RawIOBase object. You can use implementations of
    http_parser.reader (IterReader, StringReader, SocketReader) or
    create your own.
    """
    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream. 

        :attr stream: an io.RawIOBase object
        :attr kind: Int, could be 0 to parse only requests,
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream

    def _check_headers_complete(self):
        if self.parser.is_headers_complete():
            return

        while True:
            try:
                data = self.next()
            except StopIteration:
                if self.parser.is_headers_complete():
                    return
                raise NoMoreData()

            if self.parser.is_headers_complete():
                return

    def url(self):
        """ get full url of the request """
        self._check_headers_complete()
        return self.parser.get_url()

    def path(self):
        """ get path of the request (url without query string and
        fragment """
        self._check_headers_complete()
        return self.parser.get_path()

    def query_string(self):
        """ get query string of the url """
        self._check_headers_complete()
        return self.parser.get_query_string()

    def fragment(self):
        """ get fragment of the url """
        self._check_headers_complete()
        return self.parser.get_fragment()

    def version(self):
        self._check_headers_complete()
        return self.parser.get_version()

    def status_code(self):
        """ get status code of a response as integer """
        self._check_headers_complete()
        return self.parser.get_status_code()

    def status(self):
        """ return complete status with reason """
        status_code = self.status_code()
        reason = status_reasons.get(int(status_code), 'unknown')
        return "%s %s" % (status_code, reason)

    def method(self):
        """ get HTTP method as string"""
        self._check_headers_complete()
        return self.parser.get_method()

    def headers(self):
        """ get request/response headers, headers are returned in a
        OrderedDict that allows you to get value using insensitive
        keys."""
        self._check_headers_complete()
        return self.parser.get_headers()

    def should_keep_alive(self):
        """ return True if the connection should be kept alive
        """
        self._check_headers_complete()
        return self.parser.should_keep_alive()

    def is_chunked(self):
        """ return True if Transfer-Encoding header value is chunked"""
        self._check_headers_complete()
        return self.parser.is_chunked()

    def wsgi_environ(self, initial=None):
        """ get WSGI environ based on the current request.
        
        :attr initial: dict, initial values to fill in environ.
        """
        self._check_headers_complete()
        return self.parser.get_wsgi_environ()

    def body_file(self,
                  buffering=None,
                  binary=True,
                  encoding=None,
                  errors=None,
                  newline=None):
        """ return the body as a buffered stream object. If binary is
        true an io.BufferedReader will be returned, else an
        io.TextIOWrapper.
        """
        self._check_headers_complete()

        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = DEFAULT_BUFFER_SIZE

        raw = HttpBodyReader(self)
        buffer = BufferedReader(raw, buffering)
        if binary:
            return buffer
        text = TextIOWrapper(buffer, encoding, errors, newline)
        return text

    def body_string(self,
                    binary=True,
                    encoding=None,
                    errors=None,
                    newline=None):
        """ return body as string """
        return self.body_file(binary=binary,
                              encoding=encoding,
                              newline=newline).read()

    def __iter__(self):
        return self

    def next(self):
        if self.parser.is_message_complete():
            raise StopIteration

        # fetch data
        b = bytearray(DEFAULT_BUFFER_SIZE)
        recved = self.stream.readinto(b)
        if recved is None:
            raise NoMoreData("no more data")

        del b[recved:]

        # parse data
        nparsed = self.parser.execute(bytes(b), recved)
        if nparsed != recved and not self.parser.is_message_complete():
            raise ParserError("nparsed != recved")

        if recved == 0:
            raise StopIteration

        return bytes(b)
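HttpStream is easiest to see with a canned message. A minimal usage sketch of the class defined above, assuming the reader helpers named in its docstring are importable from http_parser.reader (as in the upstream http-parser package) and that the module-level names it references (HttpBodyReader, status_reasons, DEFAULT_BUFFER_SIZE, HTTP_BOTH) are present as in the original file:

from http_parser.reader import IterReader

raw = (b"HTTP/1.1 200 OK\r\n"
       b"Content-Type: text/plain\r\n"
       b"Content-Length: 5\r\n"
       b"\r\n"
       b"hello")

stream = HttpStream(IterReader([raw]))      # the class defined above
print(stream.status_code())                 # 200
print(stream.headers()["content-type"])     # text/plain (case-insensitive lookup)
print(stream.body_string())                 # the 5-byte body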
Example #52
0
    def request(self, method, url, headers=None, body=None, timeout=None):
        '''Issues a `method` request for `url` on the
        connected server.  Sends along `headers`, and
        body.

        Very low level--you must set "host" yourself,
        for example.  It will set Content-Length,
        however.
        '''
        headers = headers or {}
        url_info = urlparse(url)
        fake_wsgi = dict(
        (cgi_name(n), str(v).strip()) for n, v in headers.iteritems())

        if body and 'CONTENT_LENGTH' not in fake_wsgi:
            # If the caller hasn't set their own Content-Length but submitted
            # a body, we auto-set the Content-Length header here.
            fake_wsgi['CONTENT_LENGTH'] = str(len(body))

        fake_wsgi.update({
            'REQUEST_METHOD' : method,
            'SCRIPT_NAME' : '',
            'PATH_INFO' : url_info[2],
            'QUERY_STRING' : url_info[4],
            'wsgi.version' : (1,0),
            'wsgi.url_scheme' : 'http', # XXX incomplete
            'wsgi.input' : cStringIO.StringIO(body or ''),
            'wsgi.errors' : FileLikeErrorLogger(hlog),
            'wsgi.multithread' : False,
            'wsgi.multiprocess' : False,
            'wsgi.run_once' : False,
            })
        req = Request(fake_wsgi)

        timeout_handler = TimeoutHandler(timeout or 60)

        url = str(req.path)
        if req.query_string:
            url += '?' + str(req.query_string)

        send('%s %s HTTP/1.1\r\n%s' % (req.method, url, str(req.headers)))

        if body:
            send(body)

        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
            if ev == 'sleep': timeout_handler.timeout()
            data = val

        resp = Response(
            response=''.join(body),
            status=h.get_status_code(),
            headers=h.get_headers(),
            )

        return resp
Example #53
0
    body = []
    while True:
        data = clientsocket.recv(1024)
        if not data:
            break
        
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved

        if parser.is_headers_complete():
            print parser.get_method()
            print parser.get_path()

        if parser.is_partial_body():
            body.append(parser.recv_body())

        if parser.is_message_complete():
            break
    
    print ''.join(body)

    result = process_request(parser.get_method(),
                             parser.get_path(),
                             dict([x.split('=') for x in ''.join(body).split('&') if len(x.split('=')) == 2]))
    result += '\n'
    clientsocket.send(response + str(result))
    print result

    clientsocket.close()
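The form decoding above splits the body on '&' and '=' by hand, which silently drops URL-encoded values and repeated keys. A small sketch of the same step using the standard library parser (urlparse.parse_qs on Python 2, urllib.parse.parse_qs on Python 3):

try:                                   # Python 2
    from urlparse import parse_qs
except ImportError:                    # Python 3
    from urllib.parse import parse_qs


def form_fields(body):
    """Decode an application/x-www-form-urlencoded body into a flat dict,
    keeping the first value per key (mirroring the dict() built above)."""
    return dict((k, v[0]) for k, v in parse_qs(body).items())


print(form_fields("a=1&b=hello%20world&b=ignored"))
# -> {'a': '1', 'b': 'hello world'}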
Example #54
0
class QHttpConnection(QObject):
    newRequest = pyqtSignal(QHttpRequest, QHttpResponse)
    disconnected = pyqtSignal()

    def __init__(self, sock, parent=None):
        super(QHttpConnection, self).__init__(parent)

        self.m_sock = sock
        self.m_body = []
        self.m_parser = HttpParser()

        self.m_request = QHttpRequest(self)
        self.m_request = None
        self.m_response = QHttpResponse(self)
        self.m_response = None

        self.m_sock.readyRead.connect(self._onReadyRead)
        self.m_sock.disconnected.connect(self._onDisconnected)
        self.m_sock.bytesWritten.connect(self._onBytesWritten)

        return

    def write(self, data):
        self.m_sock.write(data)
        return

    def _onReadyRead(self):
        #qDebug('hehe')
        qtdata = self.m_sock.readAll()
        pydata = qtdata.data()
        np = self.m_parser.execute(pydata, len(pydata))
        qDebug(str(np) + '=?' + str(len(pydata)))
        #qDebug(qtdata)
        #qDebug(qtdata.toHex())
        #print(self.m_parser._body)
        #print(self.m_parser._body)

        #qDebug(str(self.m_parser.is_message_begin()))
        #qDebug(str(self.m_parser.is_message_complete()))
        #qDebug(str(self.m_parser.is_headers_complete()))

        if self.m_parser.is_headers_complete():
            if self.m_request is not None:
                qWarning('already have a request object')
            else:
                self.m_request = QHttpRequest(self)
                _qogc.add(self.m_request)
                # qDebug(str(self.m_request))
                # print(self.m_parser.get_headers())
            True

        ### body area
        # qDebug(str(self.m_parser.is_message_begin()))
        # unused lines; this probably refers to the flag used on the client side to mark that the data download has completed.
        if self.m_parser.is_message_begin() and self.m_request != None:
            qDebug('body coming...')
            self.m_request.hasBody()

        mth = self.m_parser.get_method()
        # qDebug(mth)

        if mth == 'GET':
            if self.m_parser.is_headers_complete():
                self.m_response = QHttpResponse(self)
                self.m_response.done.connect(self._onResponseDone)
                _qogc.add(self.m_response)

                self.newRequest.emit(self.m_request, self.m_response)
            pass
        elif mth == 'POST':
            if self.m_parser.is_partial_body():
                self.m_body.append(self.m_parser.recv_body())
            if self.m_parser.is_message_complete():
                print(b''.join(self.m_body))
        elif mth == 'CONNECT':
            if self.m_parser.is_headers_complete():
                if self.m_response is not None:
                    qWarning('already have a response object')
                else:
                    self.m_response = QHttpResponse(self)
                    self.m_response.done.connect(self._onResponseDone)
                    _qogc.add(self.m_response)

                    self.newRequest.emit(self.m_request, self.m_response)
            else:
                qDebug('hdr not complete')
            True
        else:
            qWarning("not impled method:" + mth)
            self.m_sock.close()

        return

    def _onDisconnected(self):
        # qDebug('hehe')
        self.disconnected.emit()
        return

    def _onBytesWritten(self, count):
        # qDebug('hehe')
        return

    def _onResponseDone(self):
        self.m_sock.disconnectFromHost()
        self.m_sock.close()
        # qDebug(str(self.m_request))
        return

    def close(self):
        self.m_sock.flush()
        self.m_sock.close()
        return

    def last(self):
        return
Example #55
0
class HTTPProtocol(FlowControlMixin, asyncio.Protocol):

    def __init__(self, stream_reader, callback, loop=None):
        super().__init__(loop=loop)
        self._stream_reader = stream_reader
        self._stream_writer = None

        self._callback = callback
        self._task = None

        self._server = None

    def connection_made(self, transport):
        self._parser = HttpParser()

        self._stream_reader.set_transport(transport)
        self._stream_writer = asyncio.StreamWriter(
            transport,
            self,
            self._stream_reader,
            self._loop,
        )

        # Grab the name of our socket if we have it
        self._server = transport.get_extra_info("sockname")

    def connection_lost(self, exc):
        if exc is None:
            self._stream_reader.feed_eof()
        else:
            self._stream_reader.set_exception(exc)

        super().connection_lost(exc)

    def data_received(self, data):
        # Parse our incoming data with our HTTP parser
        self._parser.execute(data, len(data))

        # If we have not already handled the headers and we've gotten all of
        # them, then invoke the callback with the headers in them.
        if self._task is None and self._parser.is_headers_complete():
            coro = self.dispatch(
                {
                    "server": self._server,
                    "protocol": b"HTTP/" + b".".join(
                        str(x).encode("ascii")
                        for x in self._parser.get_version()
                    ),
                    "method": self._parser.get_method().encode("latin1"),
                    "path": self._parser.get_path().encode("latin1"),
                    "query": self._parser.get_query_string().encode("latin1"),
                    "headers": self._parser.get_headers(),
                },
                self._stream_reader,
                self._stream_writer,
            )
            self._task = asyncio.Task(coro, loop=self._loop)

        # Determine if we have any data in the body buffer and if so feed it
        # to our StreamReader
        if self._parser.is_partial_body():
            self._stream_reader.feed_data(self._parser.recv_body())

        # Determine if we've completed the end of the HTTP request, if we have
        # then we should close our stream reader because there is nothing more
        # to read.
        if self._parser.is_message_complete():
            self._stream_reader.feed_eof()

    def eof_received(self):
        # We've gotten an EOF from the client, so we'll propagate this to our
        # StreamReader
        self._stream_reader.feed_eof()

    @asyncio.coroutine
    def dispatch(self, request, request_body, response):
        # Get the status, headers, and body from the callback. The body must
        # be iterable, and each item can either be a bytes object, or an
        # asyncio coroutine, in which case we'll ``yield from`` on it to wait
        # for its value.
        status, resp_headers, body = yield from self._callback(
            request,
            request_body,
        )

        # Write out the status line to the client for this request
        # TODO: We probably don't want to hard code HTTP/1.1 here
        response.write(b"HTTP/1.1 " + status + b"\r\n")

        # Write out the headers, taking special care to ensure that any
        # mandatory headers are added.
        # TODO: We need to handle some required headers
        for key, values in resp_headers.items():
            # In order to handle headers which need to have multiple values
            # like Set-Cookie, we allow the value of the header to be an
            # iterable instead of a bytes object, in which case we'll write
            # multiple header lines for this header.
            if isinstance(values, (bytes, bytearray)):
                values = [values]

            for value in values:
                response.write(key + b": " + value + b"\r\n")

        # Before we get to the body, we need to write a blank line to separate
        # the headers and the response body
        response.write(b"\r\n")

        for chunk in body:
            # If the chunk is a coroutine, then we want to wait for the result
            # before we write it.
            if asyncio.iscoroutine(chunk):
                chunk = yield from chunk

            # Write our chunk out to the connected client
            response.write(chunk)

        # We've written everything in our iterator, so we want to close the
        # connection.
        response.close()
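The dispatch() coroutine above defines the callback contract: the callback is awaited with (request, request_body), and must return (status, headers, body) where everything is bytes, header values may be lists (one line per item), and body items may themselves be coroutines. Below is a minimal callback sketch in the same pre-async/await style as the protocol above (so it assumes a Python version that still ships asyncio.coroutine); it would be passed as the callback argument to HTTPProtocol's constructor.

import asyncio


@asyncio.coroutine
def hello_app(request, request_body):
    # request is the dict built in data_received(); request_body is the
    # StreamReader that dispatch() passes through.
    payload = yield from request_body.read()

    @asyncio.coroutine
    def deferred_chunk():
        # Body items may be coroutines; dispatch() yields from each one.
        return b"and a lazily produced chunk\n"

    status = b"200 OK"
    headers = {
        b"Content-Type": b"text/plain",
        # A list value produces one header line per item (e.g. Set-Cookie).
        b"X-Example": [b"one", b"two"],
    }
    body = [b"you sent %d body bytes\n" % len(payload), deferred_chunk()]
    return status, headers, body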
Example #56
0
class HttpStream(object):
    """ An HTTP parser providing higher-level access to a readable,
    sequential io.RawIOBase object. You can use implementations of
    http_parser.reader (IterReader, StringReader, SocketReader) or
    create your own.
    """

    def __init__(self, stream, kind=HTTP_BOTH, decompress=False):
        """ constructor of HttpStream.

        :attr stream: an io.RawIOBase object
        :attr kind: Int, could be 0 to parse only requests,
        1 to parse only responses or 2 if we want to let
        the parser detect the type.
        """
        self.parser = HttpParser(kind=kind, decompress=decompress)
        self.stream = stream

    def _check_headers_complete(self):
        if self.parser.is_headers_complete():
            return

        while True:
            try:
                next(self)
            except StopIteration:
                if self.parser.is_headers_complete():
                    return
                raise NoMoreData("Can't parse headers")

            if self.parser.is_headers_complete():
                return


    def _wait_status_line(self, cond):
        if self.parser.is_headers_complete():
            return True

        data = []
        if not cond():
            while True:
                try:
                    d = next(self)
                    data.append(d)
                except StopIteration:
                    if self.parser.is_headers_complete():
                        return True
                    raise BadStatusLine(b"".join(data))
                if cond():
                    return True
        return True

    def _wait_on_url(self):
        return self._wait_status_line(self.parser.get_url)

    def _wait_on_status(self):
        return self._wait_status_line(self.parser.get_status_code)

    def url(self):
        """ get full url of the request """
        self._wait_on_url()
        return self.parser.get_url()

    def path(self):
        """ get path of the request (url without query string and
        fragment """
        self._wait_on_url()
        return self.parser.get_path()

    def query_string(self):
        """ get query string of the url """
        self._wait_on_url()
        return self.parser.get_query_string()

    def fragment(self):
        """ get fragment of the url """
        self._wait_on_url()
        return self.parser.get_fragment()

    def version(self):
        self._wait_on_status()
        return self.parser.get_version()

    def status_code(self):
        """ get status code of a response as integer """
        self._wait_on_status()
        return self.parser.get_status_code()

    def status(self):
        """ return complete status with reason """
        status_code = self.status_code()
        reason = status_reasons.get(int(status_code), 'unknown')
        return "%s %s" % (status_code, reason)


    def method(self):
        """ get HTTP method as string"""
        self._wait_on_status()
        return self.parser.get_method()

    def headers(self):
        """ get request/response headers, headers are returned in a
        OrderedDict that allows you to get value using insensitive
        keys."""
        self._check_headers_complete()
        return self.parser.get_headers()

    def should_keep_alive(self):
        """ return True if the connection should be kept alive
        """
        self._check_headers_complete()
        return self.parser.should_keep_alive()

    def is_chunked(self):
        """ return True if Transfer-Encoding header value is chunked"""
        self._check_headers_complete()
        return self.parser.is_chunked()

    def wsgi_environ(self, initial=None):
        """ get WSGI environ based on the current request.

        :attr initial: dict, initial values to fill in environ.
        """
        self._check_headers_complete()
        return self.parser.get_wsgi_environ()

    def body_file(self, buffering=None, binary=True, encoding=None,
            errors=None, newline=None):
        """ return the body as a buffered stream object. If binary is
        true an io.BufferedReader will be returned, else an
        io.TextIOWrapper.
        """
        self._check_headers_complete()

        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = DEFAULT_BUFFER_SIZE

        raw = HttpBodyReader(self)
        buf = BufferedReader(raw, buffering)
        if binary:
            return buf
        text = TextIOWrapper(buf, encoding, errors, newline)
        return text

    def body_string(self, binary=True, encoding=None, errors=None,
            newline=None):
        """ return body as string """
        return self.body_file(binary=binary, encoding=encoding,
                newline=newline).read()

    def __iter__(self):
        return self

    def __next__(self):
        if self.parser.is_message_complete():
            raise StopIteration

        # fetch data
        b = bytearray(DEFAULT_BUFFER_SIZE)
        recved = self.stream.readinto(b)
        if recved is None:
            raise NoMoreData("no more data")

        del b[recved:]
        to_parse = bytes(b)
        # parse data
        nparsed = self.parser.execute(to_parse, recved)
        if nparsed != recved and not self.parser.is_message_complete():
            raise ParserError("nparsed != recved (%s != %s) [%s]" % (nparsed,
                recved, bytes_to_str(to_parse)))

        if recved == 0:
            raise StopIteration

        return to_parse

    next = __next__
Example #57
0
    def recvrawsocket2(sockobj, address):
        try:

            logger.error(sockobj)
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            requestdict = dict()
            requestdict['sock'] = sockobj
            #                requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            requestdatetime = time.time()
            requestdict['requestdatetime'] = requestdatetime
            responsesocketdict[seqid.__str__()] = requestdict
            logger.debug("responsesocketdict len = %d",
                         len(responsesocketdict))

            while True:
                request = sockobj.recv(recv_buf_len)
                #                    logger.warning("request  : %s" % (request))

                recved = len(request)
                #                    logger.warning("recved   : %d" % (recved))

                if (recved == 0):
                    logger.warning("socket is closed by peer %r" % (sockobj))
                    sockobj.close()
                    break

                nparsed = p.execute(request, recved)
                logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    sockobj.close()
                    break

                if p.is_headers_complete():
                    request_headers = p.get_headers()
    #                        for key in request_headers:
    #                        logger.debug("headers complete %s" % (request_headers.__str__()))

    #                        logger.warning("headers complete")

                if p.is_partial_body():
                    body.append(p.recv_body())
    #                        logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

            content = "".join(body)

            #                seqid = uuid.uuid1()

            routekey = ""
            servicepath = ""

            # A path of the form /xxx is treated as a route key; a path of the form /xxx/yyy/zzz is treated as the destination service
            request_path = p.get_path()[1:]

            #                logger.warning('PublishThread request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
            #                logger.debug("content : %s" % (content))

            servicelist = os.listdir('./apps')

            if request_path.find('/') == -1 and len(
                    request_path) and request_path in servicelist:

                routekey = "A:Queue:%s" % request_path
                if request_path in _config:
                    routekey = _config[request_path]['Consumer_Queue_Name']

                if len(content) == 0:
                    content_json = dict()
                else:
                    content_json = json.loads(content)

                content_json['sockid'] = seqid.__str__()
                content_json['from'] = selfqueuename
                _redis.lpush(routekey, json.dumps(content_json))
            else:
                ret = dict()
                ret['error_code'] = '40004'
                sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()

    #                requestdict = dict()
    #                requestdict['sock'] = sockobj
    #                requestdatetime = time.strftime(
    #                    '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
    #                requestdict['requestdatetime'] = requestdatetime
    #                responsesocketdict[seqid.__str__()] = requestdict

    # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
    #    seqid))
    # sockobj.close()

        except Exception as e:
            logger.error("recvrawsocket2 %s except raised : %s " %
                         (e.__class__, e.args))
Example #58
0
class HTTPSession(base_object.BaseObject):

    _http_header = ""
    _method = ""
    _version = ""
    _req_obj = ""
    _user_agent = "User-Agent: COS-598C-Project-Client\r\n"
    _accept = "Accept: */*\r\n"
    _accept_enc = "Accept-Encoding: *\r\n"
    _accept_charset = "Accept-Charset: *\r\n"
    _host = ""
    _writer = ""
    _closeable = False
    _http_parser = ""
    _nr_bytes = 0

    def __init__(self, method, req_obj, version):
        self._method = method
        self._req_obj = req_obj
        self._version = version
        self._http_parser = HttpParser()

    def _build_first_line(self):
        first_line = self._method + " " + self._req_obj + " " + self._version + "\r\n"
        return first_line

    def set_host(self, host):
        self._host = "Host: " + host + "\r\n"

    def set_writer(self, writer):
        self._writer = writer

    def write_response(self, data):
        recved = len(data)
        nparsed = self._http_parser.execute(data, recved)
        assert nparsed == recved
        self._nr_bytes += recved
        if self._http_parser.is_partial_body():
            self._writer.write(str(self._http_parser.recv_body()))

        if self._http_parser.is_message_complete():
            self._closeable = True
        return self._nr_bytes

    def get_response_headers(self):
        if self._http_parser.is_headers_complete():
            return self._http_parser.get_headers()

    def closeable(self):
        return self._closeable

    def set_port(self, port):
        return

    def get_request(self):
        self._http_header = self._build_first_line()+\
           self._host+\
           self._user_agent+\
           self._accept+\
           self._accept_enc+\
           self._accept_charset+\
           "\r\n"
        return self._http_header
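A sketch of how the HTTPSession above is meant to be driven: build the request text, push it down a socket, and keep feeding received chunks to write_response() until the parser reports the message complete. The writer only needs a write(str) method (write_response() calls writer.write()), so sys.stdout serves for a demo; the host is a placeholder and base_object.BaseObject is assumed to be importable alongside the class.

import socket
import sys

session = HTTPSession("GET", "/", "HTTP/1.1")
session.set_host("example.com")            # placeholder host
session.set_writer(sys.stdout)             # any object with write(str) will do

sock = socket.create_connection(("example.com", 80))
sock.sendall(session.get_request().encode("ascii"))

while not session.closeable():
    data = sock.recv(4096)
    if not data:
        break
    session.write_response(data)           # parses and streams the body out
sock.close()

print(session.get_response_headers())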
Example #59
0
    def request(self, method, url, headers=None, body=None, timeout=None):
        """Issues a `method` request to `path` on the
        connected server.  Sends along `headers`, and
        body.

        Very low level--you must set "host" yourself,
        for example.  It will set Content-Length,
        however.
        """
        headers = headers or {}
        url_info = urlparse(url)
        fake_wsgi = dict((cgi_name(n), str(v).strip()) for n, v in headers.iteritems())

        if body and "CONTENT_LENGTH" not in fake_wsgi:
            # If the caller hasn't set their own Content-Length but submitted
            # a body, we auto-set the Content-Length header here.
            fake_wsgi["CONTENT_LENGTH"] = str(len(body))

        fake_wsgi.update(
            {
                "REQUEST_METHOD": method,
                "SCRIPT_NAME": "",
                "PATH_INFO": url_info[2],
                "QUERY_STRING": url_info[4],
                "wsgi.version": (1, 0),
                "wsgi.url_scheme": "http",  # XXX incomplete
                "wsgi.input": cStringIO.StringIO(body or ""),
                "wsgi.errors": FileLikeErrorLogger(hlog),
                "wsgi.multithread": False,
                "wsgi.multiprocess": False,
                "wsgi.run_once": False,
            }
        )
        req = Request(fake_wsgi)

        timeout_handler = TimeoutHandler(timeout or 60)

        url = str(req.path)
        if req.query_string:
            url += "?" + str(req.query_string)

        send("%s %s HTTP/1.1\r\n%s" % (req.method, url, str(req.headers)))

        if body:
            send(body)

        h = HttpParser()
        body = []
        data = None
        while True:
            if data:
                used = h.execute(data, len(data))
                if h.is_headers_complete():
                    body.append(h.recv_body())
                if h.is_message_complete():
                    data = data[used:]
                    break
            ev, val = first(receive_any=True, sleep=timeout_handler.remaining())
            if ev == "sleep":
                timeout_handler.timeout()
            data = val

        resp = Response(response="".join(body), status=h.get_status_code(), headers=h.get_headers())

        return resp