示例#1
0
def main():
    """Fetch http://gunicorn.org/ and print its headers and body.

    Demonstrates incremental parsing with HttpParser: each recv() chunk
    is fed to the parser, whose state flags drive the loop.
    """
    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(("gunicorn.org", 80))
        # sendall retries partial sends; the payload must be bytes on
        # Python 3 (the original passed a str and used py2 print syntax).
        s.sendall(b"GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            # The parser must consume the whole chunk.
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()["content-length"])
                header_done = True

            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        # recv_body() yields bytes, so join with a bytes separator.
        print(b"".join(body))

    finally:
        s.close()
def main():
    """Stream http://gunicorn.org/ through HttpParser, printing the
    headers (once) and the reassembled body."""
    parser = HttpParser()
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    chunks = []
    headers_printed = False
    try:
        sock.connect(('gunicorn.org', 80))
        sock.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            chunk = sock.recv(1024)
            if not chunk:
                break

            size = len(chunk)
            consumed = parser.execute(chunk, size)
            assert consumed == size

            if parser.is_headers_complete() and not headers_printed:
                print(parser.get_headers())
                print(parser.get_headers()['content-length'])
                print(parser.get_method())
                headers_printed = True

            if parser.is_partial_body():
                chunks.append(parser.recv_body())

            if parser.is_message_complete():
                break

        print(b("").join(chunks))

    finally:
        sock.close()
示例#3
0
    def start(self):
        """Accept-loop entry point for one worker process.

        Restores default SIGTERM handling, then serves connections one at
        a time: each request is parsed incrementally with a fresh
        HttpParser and handed off to the `koi` application callable.
        """
        # Undo any inherited SIGTERM handler so the worker terminates normally.
        signal.signal(signal.SIGTERM, SIG_DFL)
        print(f"Worker booted with pid: {os.getpid()}")
        while True:
            body = []
            conn, addr = self.socket.accept()
            http_parser = HttpParser()
            with conn:
                while True:
                    data = conn.recv(1024)
                    if not data:
                        break
                    recved = len(data)
                    nparsed = http_parser.execute(data, recved)
                    # The parser must consume every byte of the chunk.
                    assert nparsed == recved

                    if http_parser.is_headers_complete():
                        print(http_parser.get_headers())

                    if http_parser.is_partial_body():
                        body.append(http_parser.recv_body())

                    if http_parser.is_message_complete():
                        break

                # NOTE(review): recv_body() normally yields bytes; joining
                # with a str separator into io.StringIO assumes str chunks —
                # confirm the parser is configured to decode the body.
                buffered_body = io.StringIO("".join(body))
                koi(self.app,
                    conn,
                    request_method=http_parser.get_method(),
                    headers=http_parser.get_headers(),
                    body=buffered_body,
                    content_length=http_parser.get_headers().get(
                        'content-length', 0))
示例#4
0
def handle(connection, address, pid, queue_obj):
  """Handle one client connection: parse the HTTP request, decode its
  JSON body, and either answer a control command or enqueue work.

  Args:
    connection: accepted client socket.
    address: peer address, used for logging and queue bookkeeping.
    pid: pid to signal (SIGUSR1) when a 'stop' command arrives.
    queue_obj: work queue; put() may raise queue.Full.
  """
  import logging
  import json
  from queue import Full

  logging.basicConfig(level=logging.DEBUG)
  logger = logging.getLogger("process-%r" % (address,))
  content = []
  parser = HttpParser()

  try:
    logger.debug("Connected %r at %r", connection, address)
    while True:
      resp = connection.recv(psize)
      recved = len(resp)

      parsed = parser.execute(resp, recved)
      assert parsed == recved

      if parser.is_headers_complete():
        parser.get_headers()

      if parser.is_partial_body():
        content.append(parser.recv_body())

      if parser.is_message_complete():
        break
  except Exception:
    # Narrowed from a bare `except:`, which would also swallow
    # SystemExit/KeyboardInterrupt; real failures are handled the same.
    logger.exception("Problem handling request: %s", sys.exc_info()[1])
    send_and_close(connection, 500)
    return

  parsed_json = {}
  data = None

  try:
    # Body chunks arrive as bytes; decode each before joining.
    parsed_json = json.loads("".join(map(lambda s: s.decode("utf-8"), content)))
    data = parsed_json.get('data')
    url = parsed_json.get('callback')
    key = parsed_json.get('private_key')
  except Exception:
    # Narrowed from a bare `except:` (same reasoning as above).
    logger.exception("Problem decoding JSON: %s", sys.exc_info()[1])
  finally:
    if data is None:
      send_and_close(connection, 400, {"message": "JSON Parse Error"})
    elif data == 'ping':
      send_and_close(connection, 200, {"started": started, "queue": queue_obj.qsize()})
    elif data == 'stop':
      send_and_close(connection, 200, {"message": "Shutting down"})
      os.kill(pid, signal.SIGUSR1)
    elif 'trackers' in data and 'hashes' in data:
      try:
        queue_obj.put({"data": [data, url, key], "address": address}, False)
        send_and_close(connection, 200, {"message": ("in queue [%r]" % (address,))})
      except Full:
        send_and_close(connection, 429, {"message": "Server queue is full. Try another one."})
    # NOTE(review): a payload whose data lacks 'trackers'/'hashes' falls
    # through without any response being sent — confirm that is intended.
示例#5
0
File: upstream.py — Project: dtrip/proxpy
    def makeRequest(self, host, url="/", port=80, method='GET', headers=None, postdata=None):
        """Send one raw HTTP request to host:port and print the parsed reply.

        Waits on self.e (presumably an Event gating startup — confirm),
        connects self.s, sends the request built by self.rawHttpReq, then
        feeds each received chunk through HttpParser until complete.
        """
        assert self.e is not None
        evSet = self.e.wait()  # noqa: F841
        # log.debug("Generating raw http request")
        self.s.connect((host, port))

        # Supply minimal default headers when the caller gives none.
        if headers is None:
            headers = {
                    "Accept": "*/*",
                    "User-Agent": self.useragent
            }

        req = self.rawHttpReq(host, url, method, headers, postdata)

        self.s.sendall(req.encode())

        h = []      # replaced by the parsed header dict once headers complete
        body = []   # accumulated body chunks
        p = HttpParser()
        tlen = 0    # total bytes received, reported in the result

        while True:
            data = self.s.recv(2048)

            if not data:
                break

            rlen = len(data)
            tlen += rlen
            nparsed = p.execute(data, rlen)
            assert nparsed == rlen

            if p.is_headers_complete():
                h = p.get_headers()
                # log.debug(p.get_headers())
            if p.is_partial_body():
                body.append(p.recv_body())

            if p.is_message_complete():
                break

        self.s.close()

        res = {'status': p.get_status_code(), 'length': tlen, 'headers': h, 'body': body, 'request': req}
        print(res)
示例#6
0
def main():
    """Download a remote .exe over plain HTTP and save it as mal.exe.

    Same incremental HttpParser pattern as the other examples; body
    chunks are collected as bytes and written to disk at the end.
    (Original mixed py2 print statements with print() calls, which is a
    syntax error under both interpreters.)
    """
    p = HttpParser()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    body = []
    header_done = False
    try:
        s.connect(('install2.optimum-installer.com', 80))
        s.send(b("GET /o/PDFCreator/Express_Installer.exe.exe HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n"))

        while True:
            data = s.recv(1024)
            if not data:
                break

            recved = len(data)
            nparsed = p.execute(data, recved)
            assert nparsed == recved

            if p.is_headers_complete() and not header_done:
                print(p.get_headers())
                print(p.get_headers()['content-length'])
                header_done = True

            if p.is_partial_body():
                # recv_body() drains the parser's buffer, so call it once
                # and reuse the chunk — the original called it a second
                # time just to print, logging an already-drained read.
                chunk = p.recv_body()
                body.append(chunk)
                print(chunk)
                print("BDy++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")

            if p.is_message_complete():
                break

        body = b("").join(body)

        print("Writing file\n")
        # Context manager guarantees the file handle is closed on error.
        with open("mal.exe", "wb") as data_write:
            data_write.write(body)

        print("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")

    finally:
        s.close()
示例#7
0
def do_request(conn):
    """Read one HTTP request from conn, then reply with EXAMPLE_RESPONSE.

    Note: the chunk is fed to the parser before the empty-read check, so
    an EOF (zero-length recv) is still signalled to the parser.
    """
    parser = HttpParser()
    chunks = []

    while True:
        buf = conn.recv(1024)
        size = len(buf)
        consumed = parser.execute(buf, size)
        assert consumed == size
        if not buf:
            break
        if parser.is_headers_complete():
            logger.debug(parser.get_headers())
        if parser.is_partial_body():
            logger.debug("is partial body")
            chunks.append(parser.recv_body())
        if parser.is_message_complete():
            break
    logger.debug(chunks)
    conn.sendall(EXAMPLE_RESPONSE)
示例#8
0
 async def run(self):
     """Read one JSON HTTP message from self.client and enqueue it.

     The body is parsed incrementally; once complete it is decoded as
     JSON, normalized to a list, and appended to self.manager.messages
     together with this handler's id. Any failure closes the client.
     """
     p = HttpParser()
     body = []
     try:
         while True:
             data = await self.loop.sock_recv(self.client, 1024)
             if not data:
                 break
             recved = len(data)
             nparsed = p.execute(data, recved)
             assert nparsed == recved
             if p.is_partial_body():
                 body.append(p.recv_body())
             if p.is_message_complete():
                 break
         # Join every chunk before decoding: a body larger than one
         # recv() arrives in several pieces, and the original
         # json.loads(body[0]) silently dropped all but the first.
         body = json.loads(b"".join(body))
         if not isinstance(body, list):
             body = [body]
         self.manager.messages.append((body, self.id))
     except Exception:
         self.client.close()
示例#9
0
def http(sock):
    """Accept connections forever, yielding a Response per parsed request."""
    while True:
        client, peer = sock.accept()
        chunks = []
        parser = HttpParser()
        while True:
            buf = client.recv(1024)
            if not buf:
                break

            size = len(buf)
            consumed = parser.execute(buf, size)
            assert consumed == size

            if parser.is_partial_body():
                chunks.append(parser.recv_body())

            if parser.is_message_complete():
                break

        yield response.Response(client, parser, ''.join(chunks), peer[0])
示例#10
0
class HttpRequest(object):
    """Thin wrapper around HttpParser exposing request data plus a lazily
    built, cached CGI-style environment dict."""

    # Cached environment; built on the first get_cgi_config() call.
    __cgi_config = None

    def __init__(self, request_text, server_config):
        self.__parser = HttpParser()
        self.__parser.execute(request_text, len(request_text))
        self.__server_config = server_config

    def get_body(self):
        """Return the parsed request body if any is buffered, else None."""
        if self.__parser.is_partial_body():
            return self.__parser.recv_body()
        return None

    def get_headers(self):
        """Return the parsed request headers."""
        return self.__parser.get_headers()

    def get_request_method(self):
        """Return the HTTP method (e.g. 'GET')."""
        return self.__parser.get_method()

    def get_request_path(self):
        """Return the request path."""
        return self.__parser.get_path()

    def get_cgi_config(self):
        """Build (once) and return the CGI environment for this request."""
        if self.__cgi_config is None:
            __cgi_config = {}
            #WSGI required variable
            #__cgi_config['wsgi.input'] = io.StringIO(self.get_body())

            #CGI
            __cgi_config['SERVER_NAME'] = self.__server_config['server_name']
            __cgi_config['SERVER_PORT'] = self.__server_config['server_port']
            # BUG FIX: this line used `:` (a no-op annotation statement)
            # instead of `=`, so SERVER_PROTOCOL was never actually set.
            __cgi_config['SERVER_PROTOCOL'] = 'HTTP/1.1'
            __cgi_config['REQUEST_METHOD'] = self.get_request_method()
            __cgi_config['PATH_INFO'] = self.get_request_path()

            for header, value in self.get_headers().items():
                __cgi_config[f'HTTP_{header}'] = value

            self.__cgi_config = __cgi_config
        return self.__cgi_config
示例#11
0
    def handle(self):
        """Bridge one HTTP request onto a redis work queue and relay the reply.

        Loads redis/queue settings from on-disk config files, parses the
        incoming request with HttpParser, pushes the JSON body onto the
        routed redis list, then polls a private per-request reply queue
        for up to ~10 seconds before answering the client.

        NOTE(review): sends str payloads over the socket and joins parser
        body chunks with a str separator — this looks like Python 2 code;
        confirm before running under Python 3.
        """
        thd = threading.current_thread()
        # logger.debug("ThreadedTCPRequestHandler--->Handle[%r]"%(thd))
        # logger.debug(dir(thd))
        # logger.debug(self.client_address)
        # logger.debug(dir(self.server))
        # logger.debug(dir(self.request))
        # logger.debug(self.request.__class__)

        # logger.debug(self.server.socket)

        # NOTE(review): both config files are re-read on every request;
        # consider caching if this handler is hot.
        fileobj = open('/opt/Keeprapid/KRWatch/server/conf/db.conf', 'r')
        _json_dbcfg = json.load(fileobj)
        fileobj.close()
        fileobj = open("/opt/Keeprapid/KRWatch/server/conf/config.conf", "r")
        _config = json.load(fileobj)
        fileobj.close()

        self._redis = redis.StrictRedis(_json_dbcfg['redisip'],
                                        int(_json_dbcfg['redisport']),
                                        password=_json_dbcfg['redispassword'])

        # Default consumer queue, overridable via config['httpproxy'].
        queuename = "W:Queue:httpproxy"
        if _config is not None and 'httpproxy' in _config and _config[
                'httpproxy'] is not None:
            if 'Consumer_Queue_Name' in _config['httpproxy'] and _config[
                    'httpproxy']['Consumer_Queue_Name'] is not None:
                queuename = _config['httpproxy']['Consumer_Queue_Name']

        # Each directory under ./apps is a routable service name.
        servicelist = os.listdir('./apps')
        try:
            # if 1:
            # sockobj = self._httpclientsocketqueue.get()
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            # requestdict = dict()
            # requestdict['sock'] = self.request
            # requestdict['server'] = self.server
            # requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            # requestdict['requestdatetime'] = requestdatetime
            # responsesocketdict[seqid.__str__()] = requestdict
            # logger.debug("responsesocketdict len = %d", len(responsesocketdict))
            # Private reply queue for this request, keyed by a fresh uuid.
            selfqueuename = "%s:%s" % (queuename, seqid.__str__())
            logger.debug("ThreadedTCPRequestHandler::run : %s" %
                         (selfqueuename))

            while True:
                self.request.settimeout(10)
                request = self.request.recv(recv_buf_len)
                # logger.warning("request  : %s" % (request))
                recved = len(request)
                # logger.warning("recved   : %d" % (recved))
                if (recved == 0):
                    logger.warning("socket is closed by peer")
                    self.request.close()
                    return

                nparsed = p.execute(request, recved)
                # logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    self.request.sendall('HTTP/1.1 500 OK\n\n')
                    self.request.close()
                    break

                if p.is_partial_body():
                    body.append(p.recv_body())
                    # logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

            content = "".join(body)

            routekey = ""
            servicepath = ""

            # A path of the form /xxx is treated as a route key; a path of
            # the form /xxx/yyy/zzz is treated as a destination service.
            request_path = p.get_path()[1:]

            # logger.warning('ThreadedTCPRequestHandler request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
            # logger.debug("content : %s" % (content))
            if content == '':
                self.request.close()
                # responsesocketdict.pop(seqid.__str__())
                return

            if request_path.find('/') == -1 and len(
                    request_path) and request_path in servicelist:

                routekey = "W:Queue:%s" % request_path
                if request_path in _config:
                    routekey = _config[request_path]['Consumer_Queue_Name']

                if len(content) == 0:
                    content_json = dict()
                else:
                    content_json = json.loads(content)

                content_json['sockid'] = seqid.__str__()
                content_json['from'] = selfqueuename
                self._redis.lpush(routekey, json.dumps(content_json))
                # Receive phase: poll our private redis queue for the reply.
                t1 = time.time()
                while 1:
                    if self._redis.llen(selfqueuename) > 0:
                        recvdata = self._redis.rpop(selfqueuename)
                        # logger.debug("ThreadedTCPRequestHandler:%r",recvdata)
                        recvbuf = json.loads(recvdata)
                        recvbuf.pop('sockid')
                        recvbuf.pop('from')
                        self.request.sendall('HTTP/1.1 200 OK\n\n%s' %
                                             (json.dumps(recvbuf)))
                        self.request.close()
                        return
                    time.sleep(0.1)
                    t2 = time.time()
                    if t2 - t1 > 10:
                        # Timed out with no reply.
                        # NOTE(review): `recvbuf` is unbound if no reply ever
                        # arrived, so this sendall would raise NameError —
                        # the outer except then closes the socket silently.
                        logger.error(
                            "ThreadedTCPRequestHandler: Waiting...... TIMEOUT")
                        self.request.sendall('HTTP/1.1 500 OK\n\n%s' %
                                             (json.dumps(recvbuf)))
                        self.request.close()
                        return
            else:
                # Unknown service: report error code 40004.
                ret = dict()
                ret['error_code'] = '40004'
                self.request.sendall('HTTP/1.1 200 OK\n\n%s' %
                                     (json.dumps(ret)))
                #                    sockobj.shutdown(socket.SHUT_WR)
                self.request.close()
                # responsesocketdict.pop(seqid.__str__())
                return

        except Exception as e:
            logger.error("ThreadedTCPRequestHandler %s except raised : %s " %
                         (e.__class__, e.args))
            self.request.close()
            return
示例#12
0
class QHttpConnection(QObject):
    """One client connection: feeds socket data through HttpParser and
    emits newRequest(QHttpRequest, QHttpResponse) when a request is ready."""

    newRequest = pyqtSignal(QHttpRequest, QHttpResponse)
    disconnected = pyqtSignal()

    def __init__(self, sock, parent=None):
        super(QHttpConnection, self).__init__(parent)

        self.m_sock = sock
        self.m_body = []
        self.m_parser = HttpParser()

        # NOTE(review): both objects below are created and immediately
        # replaced with None — dead stores unless the constructors have a
        # needed side effect; confirm.
        self.m_request = QHttpRequest(self)
        self.m_request = None
        self.m_response = QHttpResponse(self)
        self.m_response = None

        self.m_sock.readyRead.connect(self._onReadyRead)
        self.m_sock.disconnected.connect(self._onDisconnected)
        self.m_sock.bytesWritten.connect(self._onBytesWritten)

        return

    def write(self, data):
        """Write raw bytes back to the client socket."""
        self.m_sock.write(data)
        return

    def _onReadyRead(self):
        """Drain the socket, advance the parser, and dispatch by method."""
        #qDebug('hehe')
        qtdata = self.m_sock.readAll()
        pydata = qtdata.data()
        np = self.m_parser.execute(pydata, len(pydata))
        qDebug(str(np) + '=?' + str(len(pydata)))
        #qDebug(qtdata)
        #qDebug(qtdata.toHex())
        #print(self.m_parser._body)
        #print(self.m_parser._body)

        #qDebug(str(self.m_parser.is_message_begin()))
        #qDebug(str(self.m_parser.is_message_complete()))
        #qDebug(str(self.m_parser.is_headers_complete()))

        # Create the request object once the headers are fully parsed.
        if self.m_parser.is_headers_complete():
            if self.m_request != None:
                qWarning('alread have a request object')
            else:
                self.m_request = QHttpRequest(self)
                _qogc.add(self.m_request)
                # qDebug(str(self.m_request))
                # print(self.m_parser.get_headers())
            True  # no-op left by the author

        ### body area
        # qDebug(str(self.m_parser.is_message_begin()))
        # not used; this probably marks download-complete when acting as a client.
        if self.m_parser.is_message_begin() and self.m_request != None:
            qDebug('body coming...')
            self.m_request.hasBody()

        mth = self.m_parser.get_method()
        # qDebug(mth)

        if mth == 'GET':
            if self.m_parser.is_headers_complete():
                self.m_response = QHttpResponse(self)
                self.m_response.done.connect(self._onResponseDone)
                _qogc.add(self.m_response)

                self.newRequest.emit(self.m_request, self.m_response)
            pass
        elif mth == 'POST':
            # Accumulate POST body chunks; print once complete.
            if self.m_parser.is_partial_body():
                self.m_body.append(self.m_parser.recv_body())
            if self.m_parser.is_message_complete():
                print(b''.join(self.m_body))
        elif mth == 'CONNECT':
            if self.m_parser.is_headers_complete():
                if self.m_response != None:
                    qWarning('alread have a response object')
                else:
                    self.m_response = QHttpResponse(self)
                    self.m_response.done.connect(self._onResponseDone)
                    _qogc.add(self.m_response)

                    self.newRequest.emit(self.m_request, self.m_response)
            else:
                qDebug('hdr not complete')
            True  # no-op left by the author
        else:
            qWarning("not impled method:" + mth)
            self.m_sock.close()

        return

    def _onDisconnected(self):
        """Propagate socket disconnect to listeners."""
        # qDebug('hehe')
        self.disconnected.emit()
        return

    def _onBytesWritten(self, count):
        """Placeholder: write-progress notifications are ignored."""
        # qDebug('hehe')
        return

    def _onResponseDone(self):
        """Tear down the connection once the response is finished."""
        self.m_sock.disconnectFromHost()
        self.m_sock.close()
        # qDebug(str(self.m_request))
        return

    def close(self):
        """Flush and close the underlying socket."""
        self.m_sock.flush()
        self.m_sock.close()
        return

    def last(self):
        return
示例#13
0
File: protocol.py — Project: dstufft/stein
class HTTPProtocol(FlowControlMixin, asyncio.Protocol):
    """asyncio protocol that parses HTTP with HttpParser, streams the body
    through a StreamReader, and dispatches each request to a callback.

    NOTE(review): @asyncio.coroutine (used below) was removed in Python
    3.11 — migrate to `async def` when upgrading.
    """

    def __init__(self, stream_reader, callback, loop=None):
        super().__init__(loop=loop)
        self._stream_reader = stream_reader
        self._stream_writer = None

        # Application callback: (request, body_reader) -> (status, headers, body)
        self._callback = callback
        self._task = None

        self._server = None

    def connection_made(self, transport):
        """Set up the parser and stream writer for a new connection."""
        self._parser = HttpParser()

        self._stream_reader.set_transport(transport)
        self._stream_writer = asyncio.StreamWriter(
            transport,
            self,
            self._stream_reader,
            self._loop,
        )

        # Grab the name of our socket if we have it
        self._server = transport.get_extra_info("sockname")

    def connection_lost(self, exc):
        """Signal EOF (clean close) or the error to the stream reader."""
        if exc is None:
            self._stream_reader.feed_eof()
        else:
            self._stream_reader.set_exception(exc)

        super().connection_lost(exc)

    def data_received(self, data):
        # Parse our incoming data with our HTTP parser
        self._parser.execute(data, len(data))

        # If we have not already handled the headers and we've gotten all of
        # them, then invoke the callback with the headers in them.
        if self._task is None and self._parser.is_headers_complete():
            coro = self.dispatch(
                {
                    "server": self._server,
                    "protocol": b"HTTP/" + b".".join(
                        str(x).encode("ascii")
                        for x in self._parser.get_version()
                    ),
                    "method": self._parser.get_method().encode("latin1"),
                    "path": self._parser.get_path().encode("latin1"),
                    "query": self._parser.get_query_string().encode("latin1"),
                    "headers": self._parser.get_headers(),
                },
                self._stream_reader,
                self._stream_writer,
            )
            self._task = asyncio.Task(coro, loop=self._loop)

        # Determine if we have any data in the body buffer and if so feed it
        # to our StreamReader
        if self._parser.is_partial_body():
            self._stream_reader.feed_data(self._parser.recv_body())

        # Determine if we've completed the end of the HTTP request, if we have
        # then we should close our stream reader because there is nothing more
        # to read.
        if self._parser.is_message_complete():
            self._stream_reader.feed_eof()

    def eof_received(self):
        # We've gotten an EOF from the client, so we'll propagate this to our
        # StreamReader
        self._stream_reader.feed_eof()

    @asyncio.coroutine
    def dispatch(self, request, request_body, response):
        # Get the status, headers, and body from the callback. The body must
        # be iterable, and each item can either be a bytes object, or an
        # asyncio coroutine, in which case we'll ``yield from`` on it to wait
        # for it's value.
        status, resp_headers, body = yield from self._callback(
            request,
            request_body,
        )

        # Write out the status line to the client for this request
        # TODO: We probably don't want to hard code HTTP/1.1 here
        response.write(b"HTTP/1.1 " + status + b"\r\n")

        # Write out the headers, taking special care to ensure that any
        # mandatory headers are added.
        # TODO: We need to handle some required headers
        for key, values in resp_headers.items():
            # In order to handle headers which need to have multiple values
            # like Set-Cookie, we allow the value of the header to be an
            # iterable instead of a bytes object, in which case we'll write
            # multiple header lines for this header.
            if isinstance(values, (bytes, bytearray)):
                values = [values]

            for value in values:
                response.write(key + b": " + value + b"\r\n")

        # Before we get to the body, we need to write a blank line to separate
        # the headers and the response body
        response.write(b"\r\n")

        for chunk in body:
            # If the chunk is a coroutine, then we want to wait for the result
            # before we write it.
            if asyncio.iscoroutine(chunk):
                chunk = yield from chunk

            # Write our chunk out to the connect client
            response.write(chunk)

        # We've written everything in our iterator, so we want to close the
        # connection.
        response.close()
    parser = HttpParser()
    body = []
    while True:
        data = clientsocket.recv(1024)
        if not data:
            break
        
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved

        if parser.is_headers_complete():
            print parser.get_method()
            print parser.get_path()

        if parser.is_partial_body():
            body.append(parser.recv_body())

        if parser.is_message_complete():
            break
    
    print ''.join(body)

    result = process_request(parser.get_method(),
                             parser.get_path(),
                             dict([x.split('=') for x in ''.join(body).split('&') if len(x.split('=')) == 2]))
    result += '\n'
    clientsocket.send(response + str(result))
    print result

    clientsocket.close()
def main():
    """Listen for webhook POSTs and mail notifications for noticed pokemon.

    select()s over a listening socket and accepted connections; each
    readable message is parsed with HttpParser and, when the JSON body
    describes a pokemon on the notice list, an email is sent.
    """
    backlog = 128
    bufsize = 8192

    recieverConfig = RecieverConfig()
    try:
        recieverConfig.readConfig()

    except Exception as e:
        # str(e): `e.message` was removed in Python 3 and would raise
        # AttributeError inside this handler, masking the real error.
        print("Error:recieverConfig.readConfig()." + str(e))

    readNoticePokemonCsv = ReadNoticePokemonCsv()
    try:
        readNoticePokemonCsv.readCSV()

    except Exception as e:
        # Same fix as above: Exception has no .message on Python 3.
        print("Error:readNoticePokemonCsv.readCSV()." + str(e))

    sendEmail = SendEmail(recieverConfig.getConfigSmtp(),
                          recieverConfig.getConfigSmptPort(),
                          recieverConfig.getConfigAccount(),
                          recieverConfig.getConfigPassword(),
                          recieverConfig.getConfigMailAddr(),
                          recieverConfig.getConfigSubject())

    server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    inputs = set([server_sock])

    try:
        server_sock.bind((recieverConfig.getConfigHost(),
                          int(recieverConfig.getConfigPort())))
        server_sock.listen(backlog)

        while True:
            readable, writable, exceptional = select.select(inputs, [], [])

            for sock in readable:
                if sock is server_sock:
                    connect, address = server_sock.accept()
                    inputs.add(connect)

                else:
                    msg = sock.recv(bufsize)

                    if len(msg) == 0:
                        # Peer closed; drop it from the select set.
                        sock.close()
                        inputs.remove(sock)

                    else:
                        recved = len(msg)

                        # HTTP parser — a fresh one per message, since each
                        # webhook delivery is a self-contained request.
                        httpParser = HttpParser()

                        nparsed = httpParser.execute(msg, recved)
                        assert nparsed == recved

                        if httpParser.is_partial_body():
                            #Json Parser
                            bodyParser = BodyParser()
                            bodyParser.setBody(httpParser.recv_body())

                            if bodyParser.isPokemon():

                                if readNoticePokemonCsv.isNoticePokemon(
                                        bodyParser.getPokemonId()):
                                    print("Send Mail:PokemonId=" +
                                          str(bodyParser.getPokemonId()))
                                    sendEmail.sendMail(
                                        readNoticePokemonCsv.
                                        getNoticePokemonName(
                                            bodyParser.getPokemonId()),
                                        bodyParser.getLatitude(),
                                        bodyParser.getLongitude(),
                                        bodyParser.getTimeUntilHiddenMs())

    finally:
        for sock in inputs:
            sock.close()

    return
示例#16
0
class ConnectionHandler:
    """Handle one proxied client connection (Python 2 code).

    Reads the request line from the client, connects to the requested
    target host (raw tunnel for CONNECT, forwarded request otherwise),
    then pumps bytes between the two sockets.  Server->client traffic is
    additionally fed through an HttpParser so response headers and body
    can be recorded via update_traffic().

    Relies on module-level names: HttpParser, BUFLEN, HTTPVER, VERSION,
    socket, select, ms, urlid, update_traffic.
    """

    def __init__(self, connection, address, timeout):
        # NOTE(review): `address` is accepted but never used.
        # Parser state accumulated while relaying the server's response.
        self.body_file = ""
        self.p = HttpParser()
        self.body = []
        self.request_url = ""
        self.response_header = []
        self.header_done = False
        self.url = ""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        # First request line split into e.g. ('GET', 'http://h/p', 'HTTP/1.1').
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method == 'CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
                             'TRACE'):
            self.method_others()
        self.client.close()
        # NOTE(review): self.target is only assigned inside _connect_target();
        # for an unhandled method this close() raises AttributeError.
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        """Read from the client until the first request line is complete.

        Returns the whitespace-split request line; callers expect exactly
        three fields (method, path, protocol).
        """
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        #We dont wann those google.com urls.
        if not "127.0.0.1" in self.client_buffer[:end]:

            #Insert Url into database here
            self.url = '%s' % self.client_buffer[:end]

        data = (self.client_buffer[:end + 1]).split()
        # Keep anything after the request line for later forwarding.
        self.client_buffer = self.client_buffer[end + 1:]
        #print data
        return data

    def method_CONNECT(self):
        """Open a raw tunnel to the target and acknowledge the client."""
        self._connect_target(self.path)
        self.client.send(HTTPVER + ' 200 Connection established\n' +
                         'Proxy-agent: %s\n\n' % VERSION)
        self.client_buffer = ''
        self._read_write()

    def method_others(self):
        """Forward an absolute-URI request (GET/POST/...) to the target."""
        # Drop the leading "http://" (7 chars), then split host from path.
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        """Resolve host[:port] and connect self.target to it (port 80 default)."""
        i = host.find(':')
        if i != -1:
            port = int(host[i + 1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:

            (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
            print "Adress is ", address
            self.target = socket.socket(soc_family)
            self.target.connect(address)

        except Exception as e:
            print "Error Connecting to:" + str(host) + ":" + str(port)
            self.request_url = "Error Connecting to:" + str(host) + ":" + str(
                port)
            # insert to db here
            #Concat data to string
        # NOTE(review): if getaddrinfo() itself failed above, `address` is
        # unbound here and the next statement raises NameError.
        self.request_url = str(host) + " | " + str(address) + " | " + str(
            self.url)  #debug
        #print self.request_url

    def _read_write(self):
        """Pump data between client and target until error or idle timeout.

        Each select() round waits up to 3s; `count` increments per round and
        resets whenever data flows, so timeout/3 idle rounds end the loop.
        All relayed data is run through self.p (HttpParser) so that headers
        and body can be recorded with update_traffic() once a message
        completes.
        """
        time_out_max = self.timeout / 3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    try:

                        #print " Receving Data "
                        data = in_.recv(84333)
                    except Exception as e:
                        print e
                        pass

                    # Whatever arrives on one socket is forwarded to the other.
                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:

                        if data:
                            #column 25
                            #Dig here to analysis the traffic
                            #print data
                            try:
                                #Lets parse the data using http_parser modules

                                recved = len(data)
                                #
                                #print "We received so far "+ str(recved)
                                nparsed = self.p.execute(data, recved)
                                assert nparsed == recved
                                # Check
                                if self.p.is_headers_complete(
                                ) and not self.header_done:

                                    #Header is an ordered dictionary
                                    header_s = self.p.get_headers()

                                    # Lets beautify it and print it.
                                    for header, value in header_s.items():

                                        #Print Response
                                        # Connection : close format
                                        res_header = header + ": " + value
                                        self.response_header.append(res_header)

                                        self.header_done = True
                                    #Put header to Database.

                                #Check if the boday is partial, if then append the body
                                if self.p.is_partial_body():

                                    self.body.append(self.p.recv_body())
                                    #print "appending body" +self.p.recv_body()
                                    #Append data body recived to a list
                                    #print self.body

                                # If the parsing of current request is compleated
                                if self.p.is_message_complete():

                                    try:

                                        try:

                                            content_length = self.p.get_headers(
                                            )['content-length']

                                        except Exception as e:
                                            print "Exception in Body retrive-sub" + str(
                                                e)
                                            content_length = 0
                                            pass

                                            # NOTE(review): this join sits
                                            # inside the except block after
                                            # `pass`, so body_file is only
                                            # built when content-length is
                                            # missing — looks misindented.
                                            self.body_file = "".join(self.body)
                                        body_file_type = ms.buffer(
                                            self.body_file[:1024])
                                        #print self.request_url
                                        #print self.response_header
                                        #print body_file_type
                                        print urlid
                                        update_traffic(urlid, self.request_url,
                                                       self.response_header,
                                                       body_file_type)
                                    except Exception as e:
                                        print "Exception in Body retrive" + str(
                                            e)
                                        content_length = 0
                                        pass

                            except Exception as e:
                                print e
                                pass

                            #if filetype in traffice == jar,class , pdf,flash, execute
                            #save those files

                            out.send(data)
                            count = 0

                    except Exception as e:
                        print e
                        pass
            if count == time_out_max:
                break
    def recvrawsocket2(sockobj, address):
        """Receive one HTTP request from a raw socket and route it.

        Parses the request with HttpParser, then either answers a few
        built-in paths (showip, testurl) directly, or pushes the request
        content onto a Redis queue for the matching service under ./apps.
        The socket is stashed in responsesocketdict keyed by a uuid so a
        worker can send the response later.

        NOTE(review): despite method-level indentation this takes no
        `self` and ignores `address`; it reads module-level names
        (logger, responsesocketdict, recv_buf_len, _errmsg, _config,
        _redis, selfqueuename) — likely meant as a plain function.
        """
        try:
            #        if 1:
            # logger.error(sockobj)
            #            logger.debug(dir(sockobj))
            request_path = ""
            body = []
            p = HttpParser()
            seqid = uuid.uuid1()
            requestdict = dict()
            requestdict['sock'] = sockobj
            #                requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            requestdatetime = time.time()
            requestdict['requestdatetime'] = requestdatetime
            responsesocketdict[seqid.__str__()] = requestdict
            # logger.debug("responsesocketdict len = %d", len(responsesocketdict))

            # Read until the parser reports a complete message (or the
            # peer closes / a parse error occurs).
            while True:
                request = sockobj.recv(recv_buf_len)
                #                logger.warning("request  : %s" % (request))

                recved = len(request)
                #                logger.warning("recved   : %d" % (recved))

                if (recved == 0):
                    logger.warning("socket is closed by peer %r" % (sockobj))
                    sockobj.close()
                    break

                nparsed = p.execute(request, recved)
                #                logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    sockobj.close()
                    break

                if p.is_headers_complete():
                    request_headers = p.get_headers()
    #                        for key in request_headers:
    #                        logger.debug("headers complete %s" % (request_headers.__str__()))

    #                        logger.warning("headers complete")

                if p.is_partial_body():
                    body.append(p.recv_body())
    #                        logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

#            logger.debug(p.get_method())
#            logger.debug(p.get_path())
#            logger.debug(p.get_query_string())

            routekey = ""
            servicepath = ""

            # A path of the form /xxx is treated as a route key; a path of
            # the form /xxx/yyy/zzz is treated as a dest service.
            request_path = p.get_path()[1:]
            request_pathlist = request_path.split('/')
            servicename = request_pathlist[0]
            action_name = ''
            servicelist = os.listdir('./apps')
            content = dict()
            if p.get_method() == 'GET':
                if servicename == 'showip':
                    sockobj.sendall("HTTP/1.1 200 OK \n\n%s" % (sockobj))
                    sockobj.shutdown(socket.SHUT_WR)
                    sockobj.close()
                    return

                # GET requests must be exactly /service/action.
                if len(request_pathlist) != 2:
                    ret = dict()
                    ret['errcode'] = '40004'
                    ret['errmsg'] = _errmsg['40004']
                    sockobj.sendall('HTTP/1.1 500 OK\n\n%s' %
                                    (json.dumps(ret)))
                    sockobj.shutdown(socket.SHUT_WR)
                    sockobj.close()
                    return

                action_name = request_pathlist[1]

                # Build the action body from the query string (k=v&k2=v2).
                querystring = p.get_query_string()
                querylist = querystring.split('&')
                action_body = dict()
                for query in querylist:
                    kvlist = query.split('=')
                    action_body[kvlist[0]] = ''.join(kvlist[1:])
                content['action_cmd'] = action_name
                content['seq_id'] = str(random.randint(10000, 1000000))
                content['body'] = action_body
                content['version'] = '1.0'

            else:
                # Non-GET: the body itself is expected to be JSON.
                if len(body) > 0:
                    content = json.loads("".join(body))
#                content = "".join(body)

# logger.debug("servicename=%s,action_name=%s"%(servicename,action_name))
# logger.debug("content=%r"%(content))
            if servicename == 'testurl':
                sockobj.sendall('HTTP/1.1 200 OK\n\n%s' %
                                (content['body']['signature']))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()
                return

            if servicename in servicelist:
                # Default queue name, overridable via _config.
                routekey = "A:Queue:%s" % servicename
                if servicename in _config:
                    routekey = _config[servicename]['Consumer_Queue_Name']
                content['sockid'] = seqid.__str__()
                content['from'] = selfqueuename
                _redis.lpush(routekey, json.dumps(content))
            else:
                ret = dict()
                ret['errcode'] = '40004'
                ret['errmsg'] = _errmsg['40004']
                sockobj.sendall('HTTP/1.1 404 OK\n\n%s' % (json.dumps(ret)))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()

    #                requestdict = dict()
    #                requestdict['sock'] = sockobj
    #                requestdatetime = time.strftime(
    #                    '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
    #                requestdict['requestdatetime'] = requestdatetime
    #                responsesocketdict[seqid.__str__()] = requestdict

    # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
    #    seqid))
    # sockobj.close()

        except Exception as e:
            logger.error("recvrawsocket2 %s except raised : %s " %
                         (e.__class__, e.args))
示例#18
0
class HTTPSession(base_object.BaseObject):
    """One outgoing HTTP request/response exchange.

    Builds a request from a (method, url, version) triple plus a fixed
    set of client headers, and feeds response bytes through an
    HttpParser, forwarding body chunks to a registered writer object.

    Fix: the original mixed tabs and spaces for indentation (the body of
    get_response_headers), which raises TabError under Python 3; the
    class is re-indented consistently with spaces.
    """

    # Class-level defaults; real values are set in __init__ and setters.
    _http_header = ""
    _method = ""
    _version = ""
    _req_obj = ""
    _user_agent = "User-Agent: COS-598C-Project-Client\r\n"
    _accept = "Accept: */*\r\n"
    _accept_enc = "Accept-Encoding: *\r\n"
    _accept_charset = "Accept-Charset: *\r\n"
    _host = ""
    _writer = ""
    _closeable = False
    _http_parser = ""
    _nr_bytes = 0

    def __init__(self, method, req_obj, version):
        self._method = method
        self._req_obj = req_obj
        self._version = version
        self._http_parser = HttpParser()

    def _build_first_line(self):
        """Return the CRLF-terminated request line."""
        return self._method + " " + self._req_obj + " " + self._version + "\r\n"

    def set_host(self, host):
        """Set the Host header from a bare host name."""
        self._host = "Host: " + host + "\r\n"

    def set_writer(self, writer):
        """Register the object whose .write() receives body chunks."""
        self._writer = writer

    def write_response(self, data):
        """Parse one chunk of response data; return total bytes seen so far.

        Body fragments are forwarded to the writer; once the parser sees a
        complete message, the session becomes closeable.
        """
        recved = len(data)
        nparsed = self._http_parser.execute(data, recved)
        # Parser must consume the whole chunk (stripped under python -O).
        assert nparsed == recved
        self._nr_bytes += recved
        if self._http_parser.is_partial_body():
            self._writer.write(str(self._http_parser.recv_body()))

        if self._http_parser.is_message_complete():
            self._closeable = True
        return self._nr_bytes

    def get_response_headers(self):
        """Return the parsed response headers, or None if not yet complete."""
        if self._http_parser.is_headers_complete():
            return self._http_parser.get_headers()

    def closeable(self):
        """True once the full response message has been parsed."""
        return self._closeable

    def set_port(self, port):
        # Port selection is not supported; kept for interface compatibility.
        return

    def get_request(self):
        """Assemble and return the full request header block."""
        self._http_header = (self._build_first_line() +
                             self._host +
                             self._user_agent +
                             self._accept +
                             self._accept_enc +
                             self._accept_charset +
                             "\r\n")
        return self._http_header
示例#19
0
class CometaClient(object):
    """Connect a device to the Cometa infrastructure"""
    # Python 2 code (print statements, `except Exception, e` syntax).
    # Error codes used for self.error throughout the client.
    errors = {
        0: 'ok',
        1: 'timeout',
        2: 'network error',
        3: 'protocol error',
        4: 'authorization error',
        5: 'wrong parameters',
        9: 'internal error'
    }

    def __init__(self, server, port, application_id, use_ssl, logger):
        """
    The Cometa instance constructor.

    server: the Cometa server FQDN
    port: the Cometa server port
    application_id: the Cometa application ID
    """
        # Start in 'internal error' state until attach() succeeds.
        self.error = 9
        self.debug = False

        self._server = server
        self._port = port
        self._app_id = application_id
        self._use_ssl = use_ssl
        self._message_cb = None

        self._device_id = ""
        self._platform = ""
        self._hparser = None
        self._sock = None  #socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._heartbeat_rate = 60
        self._trecv = None
        self._thbeat = None
        self._hb_lock = threading.Lock()
        self._reconnecting = False
        self.log = logger
        return

    def attach(self, device_id, device_info):
        """
    Attach the specified device to a Cometa registered application. 
    Authentication is done using only the application_id (one-way authentication).

    device_id: the device unique identifier
    device_info: a description of the platform or the device (used only as a comment)
    """
        self._device_id = device_id
        self._platform = device_info
        self._hparser = HttpParser()
        tsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if self._use_ssl:
            self._sock = ssl.wrap_socket(tsock,
                                         ssl_version=ssl.PROTOCOL_SSLv23,
                                         ciphers="AES256-GCM-SHA384")
        else:
            self._sock = tsock
        try:
            self._sock.connect((self._server, self._port))
            # POST the device_info as the request body; the server replies
            # with a JSON attach-complete message.
            sendBuf = "POST /v1/applications/%s/devices/%s HTTP/1.1\r\nHost: api.cometa.io\r\nContent-Length:%d\r\n\r\n%s" % (
                self._app_id, device_id, len(device_info), device_info)
            self._sock.send(sendBuf)
            recvBuf = ""
            while True:
                data = self._sock.recv(1024)
                if not data:
                    break

                dataLen = len(data)
                nparsed = self._hparser.execute(data, dataLen)
                assert nparsed == dataLen

                if self._hparser.is_headers_complete():
                    if self.debug:
                        print "connection for device %s headers received" % (
                            device_id)
                        print self._hparser.get_headers()

                if self._hparser.is_partial_body():
                    recvBuf = self._hparser.recv_body()
                    if self.debug:
                        print "connection for device %s body received" % (
                            device_id)
                        print recvBuf
                    #TODO: check for error in connecting, i.e. 403 already connected

                    # reading the attach complete message from the server
                    # i.e. {"msg":"200 OK","heartbeat":60,"timestamp":1441382935}
                    # NOTE(review): the slice compares against '"msg":"200"'
                    # but the documented message is '"msg":"200 OK"' — one of
                    # the two is wrong; verify against the server.
                    if len(recvBuf) < 16 or recvBuf[1:12] != '"msg":"200"':
                        self.error = 5
                        print "Error in string from server; %s" % recvBuf
                        return recvBuf

                    # reset error
                    self.error = 0

                    # set the socket non blocking
                    self._sock.setblocking(0)

                    # do not (re)start the threads during a reconnection
                    if self._reconnecting:
                        self._reconnecting = False
                        return recvBuf

                    if self.debug:
                        print "connection for device %s completed" % (
                            device_id)
                        # start the hearbeat thread
                    self._thbeat = threading.Thread(target=self._heartbeat)
                    self._thbeat.daemon = True
                    self._thbeat.start()

                    # start the receive thread
                    #time.sleep(2)
                    self._trecv = threading.Thread(target=self._receive)
                    self._trecv.daemon = True  # force to exit on SIGINT
                    self._trecv.start()

                    return recvBuf
        except Exception, e:
            print e
            self.error = 2
            return
示例#20
0
class HttpProxyProtocol(asyncio.Protocol):
    ''' Implement HTTP(S) proxy behavior.

    Parses each client request, signs it for the matching API (Instagram
    or Twitter) using tokens from the token store, forwards it upstream
    (plaintext, or via an MITM TLS channel for CONNECT), and relays the
    response back to the client.
    '''

    def __init__(self, loop, config, token_store):
        ''' Constructor. '''

        # Parser/body accumulate the *client request* across data_received calls.
        self._parser = HttpParser()
        self._body = b''
        self._config = config
        self._loop = loop
        self._mitm = None
        self._mitm_host = None
        self._token_store = token_store

        self._instagram = InstagramApi(
            client_id=config['Instagram']['ClientID'],
            client_secret=config['Instagram']['ClientSecret'],
        )

        self._twitter = TwitterApi(
            consumer_key=config['Twitter']['ConsumerKey'],
            consumer_secret=config['Twitter']['ConsumerSecret'],
            app_token=config['Twitter']['AppToken'],
            app_token_secret=config['Twitter']['AppTokenSecret'],
        )

    def connection_made(self, transport):
        ''' Save a reference to the transport so that we can send a reply. '''
        log.debug('Connection opened.')
        self._transport = transport

    def connection_lost(self, exc):
        log.debug('Connection closed.')

    def data_received(self, data):
        ''' Parse incoming HTTP request. '''

        log.debug('Data received: {}'.format(data))
        # NOTE(review): execute()'s return value is not checked, so parse
        # errors are silently ignored here.
        self._parser.execute(data, len(data))

        if self._parser.is_partial_body():
            self._body += self._parser.recv_body()

        if self._parser.is_message_complete():
            method = self._parser.get_method()
            uri = self._parser.get_url()
            version = self._parser.get_version()
            headers = self._parser.get_headers()
            content_type = headers.get('Content-type', '')
            charset = _get_charset(content_type)
            body = self._body.decode(charset)

            log.debug('Client charset: {}'.format(charset))
            log.debug('Client status: method={} uri={} version={}' \
                      .format(method, uri, version))
            log.debug('Client headers: {}'.format(headers))
            log.debug('Client body: {}...'.format(body[:1000]))

            # NOTE(review): asyncio.async() was removed in Python 3.10;
            # modern code uses asyncio.ensure_future()/create_task().
            if method == 'CONNECT':
                asyncio.async(self._start_mitm(uri, version))
                self._parser = HttpParser()
            else:
                asyncio.async(
                    self._request_upstream(
                        method,
                        uri,
                        version,
                        headers,
                        body
                    )
                )


    def start_tls(self, version):
        '''
        Initiate TLS session with the client.

        This part is completely hacky! We mess around with the
        transport's internals in order to wrap the current transport in TLS.
        Python doesn't have an official way to do this, although it *might*
        get fixed in 3.6: http://bugs.python.org/issue23749
        '''

        log.debug('The proxy is starting TLS with its client.')

        status_line = 'HTTP/{}.{} {} {}\r\n\r\n' \
                      .format(version[0], version[1], 200, 'OK')
        self._transport.write(status_line.encode('ascii'))

        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        ssl_context.set_ciphers('HIGH:!aNull:!eNull')
        ssl_context.load_cert_chain('ssl/server.crt', 'ssl/server.key')

        # Reaches into private attributes (_sock, _make_ssl_transport) —
        # fragile across asyncio versions, as the docstring warns.
        original_socket = self._transport._sock
        self._transport = self._loop._make_ssl_transport(
            original_socket,
            self,
            ssl_context,
            server_side=True
        )

    @asyncio.coroutine
    def _request_upstream(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server.

        Thin error boundary: delegates to _request_upstream_helper and
        converts any failure into a 500 response carrying the traceback.
        '''

        try:
            yield from self._request_upstream_helper(
                method, uri, version, headers, body
            )
        except Exception:
            charset = _get_charset(headers.get('Content-type', ''))
            response = render_http_response(
                version,
                500,
                'PROXY ERROR',
                {'Content-type': 'text/plain; charset={}'.format(charset)},
                traceback.format_exc().encode(charset)
            )
            self._transport.write(response)
            self._transport.close()
            raise

    @asyncio.coroutine
    def _request_upstream_helper(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server. '''

        log.debug('_request_upstream(): method={}, uri={}' \
                  .format(method, uri))

        # When MITMing, the client sends origin-form paths; rebuild the
        # absolute https URL against the MITM'd host.
        if self._mitm_host:
            parsed = urlparse(uri)
            url = 'https://{}{}'.format(self._mitm_host, parsed.path)
        else:
            url = uri

        token, remaining = self._token_store.dispense(url)
        log.debug('Signing request with {} token: {}.'
                  .format(token.site, token.public))

        if 'instagram' in url:
            # NOTE(review): `parsed` is only bound in the MITM branch above;
            # a non-MITM instagram URL would raise NameError here.
            qp = parse_qs(parsed.query)
            qp['access_token'] = token.public

            qp['sig'] = self._instagram.oauth_sign(
                method=method,
                url=url,
                token=token,
                query_params=qp,
                body_params=parse_qs(body)
            )

            params = ['{}={}'.format(quote(k.encode('utf8')), quote(v.encode('utf8'))) for k,v in qp.items()]
            uri = '{}?{}'.format(parsed.path, '&'.join(params))
            log.debug('Signed instagram URL: {}'.format(uri))
        elif 'twitter' in url:
            headers['Authorization'] = self._twitter.oauth_sign(
                method=method,
                url=url,
                token=token.public,
                token_secret=token.secret,
                query_params=parse_qs(parsed.query),
                body_params=parse_qs(body)
            )
        else:
            raise ValueError('No signing algorithm known for URL: {}'
                             .format(url))

        if self._mitm is None:
            # Plaintext path: open our own upstream connection.
            url = urlparse(uri)
            host = url.hostname
            port = url.port

            if port is None:
                port = 80 if url.scheme == 'http' else 443

            log.debug('Connecting to upstream (plaintext).')
            upstream = yield from asyncio.open_connection(host, port)
            upstream_reader, upstream_writer = upstream
            request = render_http_request(method, uri, version, headers, body)
            upstream_writer.write(request)

            response = b''
            parser = HttpParser()

            while True:
                # Headers line-by-line, then exactly Content-Length body bytes.
                if not parser.is_headers_complete():
                    data = yield from upstream_reader.readline()
                else:
                    data = yield from upstream_reader.read(
                        int(parser.get_headers()['Content-Length'])
                    )

                log.debug('Received plaintext from upstream: {}'.format(data))
                parser.execute(data, len(data))

                if parser.is_partial_body():
                    body += parser.recv_body()

                if parser.is_message_complete():
                    # NOTE(review): these read from self._parser — the parser
                    # for the *client request* — not the local `parser` that
                    # parsed the upstream response; this looks like a bug.
                    version = self._parser.get_version()
                    status = self._parser.get_status_code()
                    reason = None # For some reason, the parser doesn't expose this :(
                    headers = self._parser.get_headers()

                    if status == 200:
                        self._token_store.update_rate_limit(url, headers)

                    log.debug('Plaintext upstream status: {}'.format(status))
                    log.debug('Plaintext upstream headers: {}'.format(headers))
                    log.debug('Plaintext upstream body: {}...'.format(body[:1000]))

                    response = render_http_response(
                        version, status, reason, headers, body
                    )

                    break

            upstream_writer.close()

        else:
            # MITM path: reuse the TLS channel owned by MitmProtocol.
            upstream_write = self._mitm.forward
            request = render_http_request(method, uri, version, headers, body)
            upstream_write(request)
            response = yield from self._mitm.receive()
            version, status, reason, headers, body = response

            if status == 200:
                # NOTE(review): called with (token, url, headers) here but
                # (url, headers) in the plaintext branch — one is wrong.
                self._token_store.update_rate_limit(token, url, headers)

            response = render_http_response(
                version, status, reason, headers, body
            )

        # Forward the upstream response to the client.
        self._transport.write(response)
        self._transport.close()

    def _set_header(self, key, value):
        ''' Set a header value. '''
        # NOTE(review): self._headers is never initialized in this class;
        # calling this would raise AttributeError.

        key = key.strip().upper()
        value = value.strip()
        self._headers[key] = value

    @asyncio.coroutine
    def _start_mitm(self, uri, version):
        ''' MITM a connection to the upstream server. '''

        # CONNECT target arrives as "host:port".
        log.debug('The proxy is starting an MITM connection.')
        host, port = uri.split(':')
        port = int(port)
        self._mitm_host = host

        _, self._mitm = yield from self._loop.create_connection(
            lambda: MitmProtocol(self._loop, version, self),
            host,
            port,
            ssl = ssl.create_default_context()
        )
示例#21
0
class MitmProtocol(asyncio.Protocol):
    ''' Upstream half of an MITM'd TLS connection.

    Accumulates the upstream server's response through an HttpParser and
    hands the parsed (version, status, reason, headers, body) tuple back
    to the proxy via an awaitable future.
    '''

    def __init__(self, loop, http_version, proxy):
        ''' Remember collaborators and set up parser/result state. '''

        self._loop = loop
        self._proxy = proxy
        self._http_version = http_version
        self._parser = HttpParser()
        self._body = b''
        self._received = asyncio.Future()

    def connection_made(self, transport):
        ''' Keep the transport and kick off TLS towards our own client. '''

        log.debug('MITM connection opened.')
        self._transport = transport
        peer_cert = transport.get_extra_info('peercert')
        log.debug('MITM upstream certificate: {}'.format(peer_cert))
        self._loop.call_soon(self._proxy.start_tls, self._http_version)

    def connection_lost(self, exc):
        log.debug('MITM connection closed.')
        self._received.cancel()

    def data_received(self, data):
        ''' Feed upstream bytes to the parser; resolve the future when done. '''

        log.debug('MITM data received: {}'.format(data))
        self._parser.execute(data, len(data))

        if self._parser.is_partial_body():
            self._body += self._parser.recv_body()

        if not self._parser.is_message_complete():
            return

        parser = self._parser
        version = parser.get_version()
        status = parser.get_status_code()
        # The parser offers no accessor for the reason phrase.
        reason = None
        headers = parser.get_headers()

        log.debug('MITM upstream status: {}'.format(status))
        log.debug('MITM upstream headers: {}'.format(headers))
        log.debug('MITM upstream body: {}...'.format(self._body[:1000]))

        self._received.set_result(
            (version, status, reason, headers, self._body))
        self._transport.close()

    def forward(self, data):
        ''' Relay raw client data up to the remote host. '''

        log.debug('MITM sending data: {}'.format(data))
        self._transport.write(data)

    @asyncio.coroutine
    def receive(self):
        ''' Await and return the parsed upstream response tuple. '''

        result = yield from self._received
        return result
示例#22
0
    def _request_upstream_helper(self, method, uri, version, headers, body):
        ''' Forward the request to the upstream server.

        Signs the request for the matching API (Instagram or Twitter),
        sends it upstream — over a new plaintext connection when no MITM
        channel exists, otherwise through the established MITM TLS
        channel — and writes the rendered upstream response back to the
        client transport.

        Fix: the plaintext-response branch previously read version/status/
        headers from self._parser (the parser holding the *client request*)
        instead of the local `parser` that actually parsed the upstream
        response.
        '''

        log.debug('_request_upstream(): method={}, uri={}' \
                  .format(method, uri))

        # When MITMing, the client sends origin-form paths; rebuild the
        # absolute https URL against the MITM'd host.
        if self._mitm_host:
            parsed = urlparse(uri)
            url = 'https://{}{}'.format(self._mitm_host, parsed.path)
        else:
            url = uri

        token, remaining = self._token_store.dispense(url)
        log.debug('Signing request with {} token: {}.'
                  .format(token.site, token.public))

        if 'instagram' in url:
            # NOTE(review): `parsed` is only bound in the MITM branch above;
            # a non-MITM instagram URL would raise NameError here.
            qp = parse_qs(parsed.query)
            qp['access_token'] = token.public

            qp['sig'] = self._instagram.oauth_sign(
                method=method,
                url=url,
                token=token,
                query_params=qp,
                body_params=parse_qs(body)
            )

            params = ['{}={}'.format(quote(k.encode('utf8')), quote(v.encode('utf8'))) for k,v in qp.items()]
            uri = '{}?{}'.format(parsed.path, '&'.join(params))
            log.debug('Signed instagram URL: {}'.format(uri))
        elif 'twitter' in url:
            headers['Authorization'] = self._twitter.oauth_sign(
                method=method,
                url=url,
                token=token.public,
                token_secret=token.secret,
                query_params=parse_qs(parsed.query),
                body_params=parse_qs(body)
            )
        else:
            raise ValueError('No signing algorithm known for URL: {}'
                             .format(url))

        if self._mitm is None:
            # Plaintext path: open our own upstream connection.
            url = urlparse(uri)
            host = url.hostname
            port = url.port

            if port is None:
                port = 80 if url.scheme == 'http' else 443

            log.debug('Connecting to upstream (plaintext).')
            upstream = yield from asyncio.open_connection(host, port)
            upstream_reader, upstream_writer = upstream
            request = render_http_request(method, uri, version, headers, body)
            upstream_writer.write(request)

            response = b''
            parser = HttpParser()

            while True:
                # Headers line-by-line, then exactly Content-Length body bytes.
                if not parser.is_headers_complete():
                    data = yield from upstream_reader.readline()
                else:
                    data = yield from upstream_reader.read(
                        int(parser.get_headers()['Content-Length'])
                    )

                log.debug('Received plaintext from upstream: {}'.format(data))
                parser.execute(data, len(data))

                if parser.is_partial_body():
                    # NOTE(review): `body` arrives as decoded str while
                    # recv_body() may return bytes — confirm against the
                    # parser implementation in use.
                    body += parser.recv_body()

                if parser.is_message_complete():
                    # Read the response metadata from the response parser.
                    version = parser.get_version()
                    status = parser.get_status_code()
                    reason = None # For some reason, the parser doesn't expose this :(
                    headers = parser.get_headers()

                    if status == 200:
                        self._token_store.update_rate_limit(url, headers)

                    log.debug('Plaintext upstream status: {}'.format(status))
                    log.debug('Plaintext upstream headers: {}'.format(headers))
                    log.debug('Plaintext upstream body: {}...'.format(body[:1000]))

                    response = render_http_response(
                        version, status, reason, headers, body
                    )

                    break

            upstream_writer.close()

        else:
            # MITM path: reuse the TLS channel owned by MitmProtocol.
            upstream_write = self._mitm.forward
            request = render_http_request(method, uri, version, headers, body)
            upstream_write(request)
            response = yield from self._mitm.receive()
            version, status, reason, headers, body = response

            if status == 200:
                # NOTE(review): called with (token, url, headers) here but
                # (url, headers) in the plaintext branch — one is wrong.
                self._token_store.update_rate_limit(token, url, headers)

            response = render_http_response(
                version, status, reason, headers, body
            )

        # Forward the upstream response to the client.
        self._transport.write(response)
        self._transport.close()
示例#23
0
class QHttpConnection(QObject):
    """One accepted client connection of the HTTP server.

    Feeds incoming socket data through ``HttpParser`` and, once request
    headers are complete, emits ``newRequest`` with a fresh
    (QHttpRequest, QHttpResponse) pair.  ``disconnected`` is re-emitted
    when the underlying socket drops.
    """

    newRequest = pyqtSignal(QHttpRequest, QHttpResponse)
    disconnected = pyqtSignal()

    def __init__(self, sock, parent=None):
        super(QHttpConnection, self).__init__(parent)

        self.m_sock = sock            # connected client socket
        self.m_body = []              # accumulated POST body chunks (bytes)
        self.m_parser = HttpParser()  # incremental request parser

        # Created on demand in _onReadyRead once headers are parsed.
        # (The original also pre-created throwaway QHttpRequest/QHttpResponse
        # instances here and immediately dropped the references; since they
        # were parented to self they only leaked - removed.)
        self.m_request = None
        self.m_response = None

        self.m_sock.readyRead.connect(self._onReadyRead)
        self.m_sock.disconnected.connect(self._onDisconnected)
        self.m_sock.bytesWritten.connect(self._onBytesWritten)

    def write(self, data):
        """Write raw response bytes back to the client."""
        self.m_sock.write(data)

    def _onReadyRead(self):
        """Drain the socket, advance the parser, and dispatch by method."""
        qtdata = self.m_sock.readAll()
        pydata = qtdata.data()
        nparsed = self.m_parser.execute(pydata, len(pydata))
        qDebug(str(nparsed) + '=?' + str(len(pydata)))

        if self.m_parser.is_headers_complete():
            if self.m_request is not None:
                qWarning('alread have a request object')
            else:
                self.m_request = QHttpRequest(self)
                _qogc.add(self.m_request)  # keep alive via the GC registry

        # Body is about to arrive for the current request.
        if self.m_parser.is_message_begin() and self.m_request is not None:
            qDebug('body coming...')
            self.m_request.hasBody()

        mth = self.m_parser.get_method()

        if mth == 'GET':
            if self.m_parser.is_headers_complete():
                self.m_response = QHttpResponse(self)
                self.m_response.done.connect(self._onResponseDone)
                _qogc.add(self.m_response)

                self.newRequest.emit(self.m_request, self.m_response)
        elif mth == 'POST':
            if self.m_parser.is_partial_body():
                self.m_body.append(self.m_parser.recv_body())
            if self.m_parser.is_message_complete():
                print(b''.join(self.m_body))
        elif mth == 'CONNECT':
            if self.m_parser.is_headers_complete():
                if self.m_response is not None:
                    qWarning('alread have a response object')
                else:
                    self.m_response = QHttpResponse(self)
                    self.m_response.done.connect(self._onResponseDone)
                    _qogc.add(self.m_response)

                    self.newRequest.emit(self.m_request, self.m_response)
            else:
                qDebug('hdr not complete')
        else:
            qWarning("not impled method:" + mth)
            self.m_sock.close()

    def _onDisconnected(self):
        """Propagate the socket's disconnect to our own signal."""
        self.disconnected.emit()

    def _onBytesWritten(self, count):
        """No-op hook; kept for signal symmetry / debugging."""
        return

    def _onResponseDone(self):
        """Response finished: tear the connection down."""
        self.m_sock.disconnectFromHost()
        self.m_sock.close()

    def close(self):
        """Flush pending data and close the socket."""
        self.m_sock.flush()
        self.m_sock.close()

    def last(self):
        return
Example #24
0
File: proxy_links.py  Project: BwRy/sandy
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file =""
        self.p = HttpParser()
	self.body = []
	self.request_url = ""
	self.response_header = []
	self.header_done = False
        self.url =""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method=='CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                             'DELETE', 'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end!=-1:
                break
        #We dont wann those google.com urls.        
        if not "127.0.0.1" in self.client_buffer[:end]:
	  
	  #Insert Url into database here
          self.url = '%s'%self.client_buffer[:end]
          
          
        data = (self.client_buffer[:end+1]).split()
        self.client_buffer = self.client_buffer[end+1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER+' 200 Connection established\n'+
                         'Proxy-agent: %s\n\n'%VERSION)
        self.client_buffer = ''
        self._read_write()        

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]        
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n'%(self.method, path, self.protocol)+
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i!=-1:
            port = int(host[i+1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:
	  
	  
          (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
          self.target = socket.socket(soc_family)
          self.target.connect(address)
          
        except Exception as e:
	  address =host
	  print "Error Connecting to:"+str(address)
	  connect_ip = "Error Connecting to:"+str(address)
	  update_traffic_link(urlid,connect_ip,"Unable to Connect","Nil","")
	  # insert to db here
        #Concat data to string
        self.request_url = str(host)+" | "+str(address)+" | "+str(self.url) #debug
        #print self.request_url


    def _read_write(self):
        
        time_out_max = self.timeout/3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
		    try:
		      
		      #print " Receving Data "
                      data = in_.recv(10000)
                    except Exception as e:
		      print e
		      pass
		    
                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:
		      
		      
		      if data:
			  #column 25
			  #Dig here to analysis the traffic
			  #print data
			  try:
			    #Lets parse the data using http_parser modules
			    
			    recved = len(data)
			    #
			    #print "We received so far "+ str(recved)
			    nparsed = self.p.execute(data, recved)
			    assert nparsed == recved
			    # Check 
			    if self.p.is_headers_complete() and not self.header_done:
			      
			      #Header is an ordered dictionary 
			      header_s = self.p.get_headers()
			      
			     
			      # Lets beautify it and print it.
			      for header, value in header_s.items():
				
				#Print Response
				# Connection : close format
				res_header = header+": "+value
				self.response_header.append(res_header)
			      
			        self.header_done = True
			        #Put header to Database.
			        
			   
			    #Check if the boday is partial, if then append the body
			    if self.p.is_partial_body():
			      
			      self.body.append(self.p.recv_body())
			      #print "appending body" +self.p.recv_body()
			      #Append data body recived to a list
			      #print self.body
			      
			    # If the parsing of current request is compleated 
			    if self.p.is_message_complete():
			      
			      try:
				
				try:
				  
				  content_length = self.p.get_headers()['content-length']
			        
			        except Exception as e:
				  print "Exception in Body retrive-sub"+str(e)
				  content_length = 0
				  pass
				  
			        self.body_file = "".join(self.body)
			        body_file_type = ms.buffer(self.body_file[:1024])
			        signature_scan = ""
			        html_source =""
			        html_body=""
			        html_body = self.body_file
			        if "gzip" in body_file_type:
				  try:
				    
				    print " Decoding GZIp html\n"
				    html_body = zlib.decompress(html_body, 16+zlib.MAX_WBITS)
				    #print "source"+str(html_body)
				  except Exception as e:
				    print "Error gzip decoding:"+str(e)
				    
				  
			        
			        print urlid 
			        signature_scan_body = yara_match(html_body)
			        signature_scan_request = yara_match(self.request_url)
			        signature_scan_response =""
			        self_response = ""
			        try:
				  #This is a list convert to string and do the check
				  print self.response_header
				  self_response = ''.join(self.response_header)
				  if "Content-Disposition:" in self_response and "attachment;" in  self_response:
				    signature_scan_response = "Forced-file-download"
				    print " Signatured matched in response"
				    
				except Exception as e:
				  print e,"Error in header_match"
			        signature_scan = str(signature_scan_body) +""+str(signature_scan_request)+""+signature_scan_response
  
			        #print self.request_url
			        #print self.response_header
			        #print body_file_type
			        
			        
			        if len(signature_scan) > 6:
				  try:
				    
				    print " Signatured found and Updating\n"
				    body_file_type = "Signature_Matched: "+signature_scan+" ing "+body_file_type
				    insert_html(urlid,html_body,signature_scan)
				    html_source = html_body
				  
				  except Exception as e:
				    print "Error in Traffic Signature"+str(e)
				  
				print " Trffic Updated\n"
			        update_traffic_link(urlid,self.request_url,self.response_header,body_file_type,html_source)
				  
			        if "executable" in body_file_type:
				  print "\nExecutable found\n"
				  binary_found(urlid)
				  
				  
			      except Exception as e:
				print "Exception in Body retrive"+str(e)
				content_length = 0
				pass
			      
			      
			  except Exception as e:
			    print e
			    pass

			  #if filetype in traffice == jar,class , pdf,flash, execute
			  #save those files
			  
			  
			  out.send(data)
			  count = 0
		      
	
	            except Exception as e:
		      print e
		      pass
            if count == time_out_max:
                break
Example #25
0
File: proxy.py  Project: BwRy/sandy
class ConnectionHandler:
    def __init__(self, connection, address, timeout):
        self.body_file =""
        self.p = HttpParser()
	self.body = []
	self.request_url = ""
	self.response_header = []
	self.header_done = False
        self.url =""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method=='CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                             'DELETE', 'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end!=-1:
                break
        #We dont wann those google.com urls.        
        if not "127.0.0.1" in self.client_buffer[:end]:
	  
	  #Insert Url into database here
          self.url = '%s'%self.client_buffer[:end]
          
          
        data = (self.client_buffer[:end+1]).split()
        self.client_buffer = self.client_buffer[end+1:]
        #print data
        return data

    def method_CONNECT(self):
        self._connect_target(self.path)
        self.client.send(HTTPVER+' 200 Connection established\n'+
                         'Proxy-agent: %s\n\n'%VERSION)
        self.client_buffer = ''
        self._read_write()        

    def method_others(self):
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]        
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n'%(self.method, path, self.protocol)+
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        i = host.find(':')
        if i!=-1:
            port = int(host[i+1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:
	  
	  
          (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
          print "Adress is ",address
          self.target = socket.socket(soc_family)
          self.target.connect(address)
          
        except Exception as e:
	  print "Error Connecting to:"+str(host)+":"+str(port)
	  self.request_url = "Error Connecting to:"+str(host)+":"+str(port)
	  # insert to db here
        #Concat data to string
        self.request_url = str(host)+" | "+str(address)+" | "+str(self.url) #debug
        #print self.request_url


    def _read_write(self):
        
        time_out_max = self.timeout/3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
		    try:
		      
		      #print " Receving Data "
                      data = in_.recv(84333)
                    except Exception as e:
		      print e
		      pass
		    
                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:
		      
		      
		      if data:
			  #column 25
			  #Dig here to analysis the traffic
			  #print data
			  try:
			    #Lets parse the data using http_parser modules
			    
			    recved = len(data)
			    #
			    #print "We received so far "+ str(recved)
			    nparsed = self.p.execute(data, recved)
			    assert nparsed == recved
			    # Check 
			    if self.p.is_headers_complete() and not self.header_done:
			      
			      #Header is an ordered dictionary 
			      header_s = self.p.get_headers()
			      
			     
			      # Lets beautify it and print it.
			      for header, value in header_s.items():
				
				#Print Response
				# Connection : close format
				res_header = header+": "+value
				self.response_header.append(res_header)
			      
			        self.header_done = True
			        #Put header to Database.
			        
			   
			    #Check if the boday is partial, if then append the body
			    if self.p.is_partial_body():
			      
			      self.body.append(self.p.recv_body())
			      #print "appending body" +self.p.recv_body()
			      #Append data body recived to a list
			      #print self.body
			      
			    # If the parsing of current request is compleated 
			    if self.p.is_message_complete():
			      
			      try:
				
				try:
				  
				  content_length = self.p.get_headers()['content-length']
			        
			        except Exception as e:
				  print "Exception in Body retrive-sub"+str(e)
				  content_length = 0
				  pass
				  
				  self.body_file = "".join(self.body)
			        body_file_type = ms.buffer(self.body_file[:1024])
			        #print self.request_url
			        #print self.response_header
			        #print body_file_type
			        print urlid 
			        update_traffic(urlid,self.request_url,self.response_header,body_file_type)
			      except Exception as e:
				print "Exception in Body retrive"+str(e)
				content_length = 0
				pass
			      
			      
			  except Exception as e:
			    print e
			    pass

			  #if filetype in traffice == jar,class , pdf,flash, execute
			  #save those files
			  
			  
			  out.send(data)
			  count = 0
		      
	
	            except Exception as e:
		      print e
		      pass
            if count == time_out_max:
                break
Example #26
0
class ConnectionHandler:
    """Proxy one client connection and sniff the relayed HTTP traffic.

    Relays bytes between the client and the target host while feeding the
    stream through http_parser; completed responses are fingerprinted with
    libmagic (``ms``), scanned with yara, and logged to the database via the
    module-level helpers (``update_traffic_link``, ``insert_html``,
    ``binary_found``).  Python 2 code (print statements).
    """

    def __init__(self, connection, address, timeout):
        # Per-connection parse/analysis state.
        self.body_file = ""
        self.p = HttpParser()
        self.body = []
        self.request_url = ""
        self.response_header = []
        self.header_done = False
        self.url = ""
        self.controller = []
        self.controller_ip = []
        self.client = connection
        self.client_buffer = ''
        self.timeout = timeout
        # Dispatch on the request method; CONNECT is tunnelled, the rest
        # are rewritten and forwarded.
        self.method, self.path, self.protocol = self.get_base_header()
        if self.method == 'CONNECT':
            self.method_CONNECT()
        elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
                             'TRACE'):
            self.method_others()
        self.client.close()
        self.target.close()
        #clear
        #print self.controller , self.controller_ip

    def get_base_header(self):
        """Read up to the first newline from the client and split the
        request line into (method, path, protocol)."""
        while 1:
            self.client_buffer += self.client.recv(BUFLEN)
            end = self.client_buffer.find('\n')
            if end != -1:
                break
        #We dont wann those google.com urls.
        if not "127.0.0.1" in self.client_buffer[:end]:

            #Insert Url into database here
            self.url = '%s' % self.client_buffer[:end]

        data = (self.client_buffer[:end + 1]).split()
        self.client_buffer = self.client_buffer[end + 1:]
        #print data
        return data

    def method_CONNECT(self):
        """Handle an HTTPS tunnel: connect, acknowledge, then relay."""
        self._connect_target(self.path)
        self.client.send(HTTPVER + ' 200 Connection established\n' +
                         'Proxy-agent: %s\n\n' % VERSION)
        self.client_buffer = ''
        self._read_write()

    def method_others(self):
        """Handle plain HTTP methods: strip the "http://" scheme, split
        host and path, forward the rewritten request line, then relay."""
        self.path = self.path[7:]
        i = self.path.find('/')
        host = self.path[:i]
        path = self.path[i:]
        self._connect_target(host)
        self.target.send('%s %s %s\n' % (self.method, path, self.protocol) +
                         self.client_buffer)
        self.client_buffer = ''
        self._read_write()

    def _connect_target(self, host):
        """Resolve host[:port], open self.target; log failures to the DB."""
        i = host.find(':')
        if i != -1:
            port = int(host[i + 1:])
            host = host[:i]
            #print host
        else:
            port = 80
        try:

            (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
            self.target = socket.socket(soc_family)
            self.target.connect(address)

        except Exception as e:
            # address falls back to the bare host so the debug line below
            # still formats after a failed resolve/connect.
            address = host
            print "Error Connecting to:" + str(address)
            connect_ip = "Error Connecting to:" + str(address)
            update_traffic_link(urlid, connect_ip, "Unable to Connect", "Nil",
                                "")
            # insert to db here
            #Concat data to string
        self.request_url = str(host) + " | " + str(address) + " | " + str(
            self.url)  #debug
        #print self.request_url

    def _read_write(self):
        """Relay loop: shuttle bytes both ways, parsing and analysing the
        HTTP stream as it passes through.  NOTE(review): broad excepts
        swallow errors by design (best-effort sniffing); `data` may be
        stale if recv raises."""
        time_out_max = self.timeout / 3
        socs = [self.client, self.target]
        count = 0
        while 1:
            count += 1
            (recv, _, error) = select.select(socs, [], socs, 3)
            if error:
                break
            if recv:
                for in_ in recv:
                    try:

                        #print " Receving Data "
                        data = in_.recv(10000)
                    except Exception as e:
                        print e
                        pass

                    if in_ is self.client:
                        out = self.target
                    else:
                        out = self.client
                    try:

                        if data:
                            #column 25
                            #Dig here to analysis the traffic
                            #print data
                            try:
                                #Lets parse the data using http_parser modules

                                recved = len(data)
                                #
                                #print "We received so far "+ str(recved)
                                nparsed = self.p.execute(data, recved)
                                assert nparsed == recved
                                # Check
                                if self.p.is_headers_complete(
                                ) and not self.header_done:

                                    #Header is an ordered dictionary
                                    header_s = self.p.get_headers()

                                    # Lets beautify it and print it.
                                    for header, value in header_s.items():

                                        #Print Response
                                        # Connection : close format
                                        res_header = header + ": " + value
                                        self.response_header.append(res_header)

                                        # NOTE(review): set inside the loop;
                                        # net effect is the same as setting
                                        # once after it.
                                        self.header_done = True
                                    #Put header to Database.

                                #Check if the boday is partial, if then append the body
                                if self.p.is_partial_body():

                                    self.body.append(self.p.recv_body())
                                    #print "appending body" +self.p.recv_body()
                                    #Append data body recived to a list
                                    #print self.body

                                # If the parsing of current request is compleated
                                if self.p.is_message_complete():

                                    try:

                                        try:

                                            content_length = self.p.get_headers(
                                            )['content-length']

                                        except Exception as e:
                                            print "Exception in Body retrive-sub" + str(
                                                e)
                                            content_length = 0
                                            pass

                                        self.body_file = "".join(self.body)
                                        body_file_type = ms.buffer(
                                            self.body_file[:1024])
                                        signature_scan = ""
                                        html_source = ""
                                        html_body = ""
                                        html_body = self.body_file
                                        if "gzip" in body_file_type:
                                            try:

                                                print " Decoding GZIp html\n"
                                                # 16 + MAX_WBITS: gzip header expected.
                                                html_body = zlib.decompress(
                                                    html_body,
                                                    16 + zlib.MAX_WBITS)
                                                #print "source"+str(html_body)
                                            except Exception as e:
                                                print "Error gzip decoding:" + str(
                                                    e)

                                        print urlid
                                        signature_scan_body = yara_match(
                                            html_body)
                                        signature_scan_request = yara_match(
                                            self.request_url)
                                        signature_scan_response = ""
                                        self_response = ""
                                        try:
                                            #This is a list convert to string and do the check
                                            print self.response_header
                                            self_response = ''.join(
                                                self.response_header)
                                            if "Content-Disposition:" in self_response and "attachment;" in self_response:
                                                signature_scan_response = "Forced-file-download"
                                                print " Signatured matched in response"

                                        except Exception as e:
                                            print e, "Error in header_match"
                                        signature_scan = str(
                                            signature_scan_body) + "" + str(
                                                signature_scan_request
                                            ) + "" + signature_scan_response

                                        #print self.request_url
                                        #print self.response_header
                                        #print body_file_type

                                        if len(signature_scan) > 6:
                                            try:

                                                print " Signatured found and Updating\n"
                                                body_file_type = "Signature_Matched: " + signature_scan + " ing " + body_file_type
                                                insert_html(
                                                    urlid, html_body,
                                                    signature_scan)
                                                html_source = html_body

                                            except Exception as e:
                                                print "Error in Traffic Signature" + str(
                                                    e)

                                        print " Trffic Updated\n"
                                        update_traffic_link(
                                            urlid, self.request_url,
                                            self.response_header,
                                            body_file_type, html_source)

                                        if "executable" in body_file_type:
                                            print "\nExecutable found\n"
                                            binary_found(urlid)

                                    except Exception as e:
                                        print "Exception in Body retrive" + str(
                                            e)
                                        content_length = 0
                                        pass

                            except Exception as e:
                                print e
                                pass

                            #if filetype in traffice == jar,class , pdf,flash, execute
                            #save those files

                            out.send(data)
                            count = 0

                    except Exception as e:
                        print e
                        pass
            if count == time_out_max:
                break
Example #27
0
    def recvrawsocket2(sockobj, address):
        """Read one HTTP request from sockobj, then route its JSON body to a
        Redis queue keyed by the first path segment, or answer error_code
        40004 when the segment is not a known service under ./apps.
        Relies on module-level state: responsesocketdict, recv_buf_len,
        _config, selfqueuename, _redis, logger."""
        try:

            logger.error(sockobj)
            request_path = ""
            body = []
            p = HttpParser()
            # Register the socket under a fresh uuid so a later response
            # can find it in responsesocketdict.
            seqid = uuid.uuid1()
            requestdict = dict()
            requestdict['sock'] = sockobj
            #                requestdatetime = time.strftime('%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
            requestdatetime = time.time()
            requestdict['requestdatetime'] = requestdatetime
            responsesocketdict[seqid.__str__()] = requestdict
            logger.debug("responsesocketdict len = %d",
                         len(responsesocketdict))

            # Receive/parse loop: run until the parser sees a full message
            # or the peer closes / a parse error occurs.
            while True:
                request = sockobj.recv(recv_buf_len)
                #                    logger.warning("request  : %s" % (request))

                recved = len(request)
                #                    logger.warning("recved   : %d" % (recved))

                if (recved == 0):
                    logger.warning("socket is closed by peer %r" % (sockobj))
                    sockobj.close()
                    break

                nparsed = p.execute(request, recved)
                logger.warning("nparsed  : %d" % (nparsed))
                if nparsed != recved:
                    logger.warning("parse error")
                    sockobj.close()
                    break

                if p.is_headers_complete():
                    request_headers = p.get_headers()
    #                        for key in request_headers:
    #                        logger.debug("headers complete %s" % (request_headers.__str__()))

    #                        logger.warning("headers complete")

                if p.is_partial_body():
                    body.append(p.recv_body())
    #                        logger.warning("body  : %s" % (body))

                if p.is_message_complete():
                    #                        logger.warning("message complete")
                    break

            content = "".join(body)

            #                seqid = uuid.uuid1()

            routekey = ""
            servicepath = ""

            # A "/xxx" path is treated as a route key; "/xxx/yyy/zzz" would
            # be a dest service (only the single-segment form is routed here).
            request_path = p.get_path()[1:]

            #                logger.warning('PublishThread request_path (%s), is routekey (%d)' % (request_path, request_path.find('/')))
            #                logger.debug("content : %s" % (content))

            servicelist = os.listdir('./apps')

            if request_path.find('/') == -1 and len(
                    request_path) and request_path in servicelist:

                # Queue name defaults to "A:Queue:<service>" unless the
                # service config overrides it.
                routekey = "A:Queue:%s" % request_path
                if request_path in _config:
                    routekey = _config[request_path]['Consumer_Queue_Name']

                if len(content) == 0:
                    content_json = dict()
                else:
                    content_json = json.loads(content)

                # Tag the payload so the consumer can answer on this socket.
                content_json['sockid'] = seqid.__str__()
                content_json['from'] = selfqueuename
                _redis.lpush(routekey, json.dumps(content_json))
            else:
                # Unknown service: answer immediately and close.
                ret = dict()
                ret['error_code'] = '40004'
                sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
                sockobj.shutdown(socket.SHUT_WR)
                sockobj.close()

    #                requestdict = dict()
    #                requestdict['sock'] = sockobj
    #                requestdatetime = time.strftime(
    #                    '%Y.%m.%d.%H.%M.%S', time.localtime(time.time()))
    #                requestdict['requestdatetime'] = requestdatetime
    #                responsesocketdict[seqid.__str__()] = requestdict

    # sockobj.sendall('HTTP/1.1 200 OK\n\nWelcome %s' % (
    #    seqid))
    # sockobj.close()

        except Exception as e:
            logger.error("recvrawsocket2 %s except raised : %s " %
                         (e.__class__, e.args))
Example #28
0
    def run(self):
        """Accept queued client sockets, parse their HTTP requests and publish them to Redis.

        Each socket taken from ``self._httpclientsocketqueue`` is read and fed
        to an ``HttpParser``.  A request whose path is a bare service name
        (``/xxx``) present under ``./apps`` is serialized as JSON (tagged with
        a per-request uuid and this worker's reply queue name) and pushed onto
        that service's Redis queue; any other path is answered immediately
        with ``error_code`` 40004 and the socket is closed.  Sockets of
        forwarded requests are kept in ``self._response_socket_dict`` keyed by
        the uuid so the response path can locate them later.
        """
        # Default publish queue; may be overridden by the 'httpproxy' config section.
        queuename = "A:Queue:httpproxy"
        if self._config is not None and 'httpproxy' in self._config and self._config['httpproxy'] is not None:
            if 'Consumer_Queue_Name' in self._config['httpproxy'] and self._config['httpproxy']['Consumer_Queue_Name'] is not None:
                queuename = self._config['httpproxy']['Consumer_Queue_Name']

        # Per-process reply queue name so concurrent workers can be told apart.
        selfqueuename = "%s:%s" % (queuename, os.getpid())
        logger.debug("PublishThread::run : %s" % (selfqueuename))
        servicelist = os.listdir('./apps')  # known service names (snapshot taken once)
        while True:
            try:
                sockobj = self._httpclientsocketqueue.get()
                request_path = ""
                body = []
                p = HttpParser()
                seqid = uuid.uuid1()
                requestdict = dict()
                requestdict['sock'] = sockobj
                requestdict['requestdatetime'] = time.time()
                self._response_socket_dict[seqid.__str__()] = requestdict

                # Read until the request is fully parsed; abort on peer close
                # or on a parser failure.
                aborted = False
                while True:
                    request = sockobj.recv(self._recvbuflen)
                    recved = len(request)

                    if recved == 0:
                        logger.warning("socket is closed by peer")
                        sockobj.close()
                        aborted = True
                        break

                    nparsed = p.execute(request, recved)
                    if nparsed != recved:
                        logger.warning("parse error")
                        sockobj.close()
                        aborted = True
                        break

                    if p.is_partial_body():
                        body.append(p.recv_body())

                    if p.is_message_complete():
                        break

                if aborted:
                    # Bug fix: previously control fell through to the dispatch
                    # code below with an incomplete request and an already
                    # closed socket, raising inside sendall() and permanently
                    # leaking the _response_socket_dict entry.
                    self._response_socket_dict.pop(seqid.__str__(), None)
                    continue

                content = "".join(body)

                # A path of the form /xxx is treated as a route key; a path of
                # the form /xxx/yyy/zzz is treated as a destination service.
                request_path = p.get_path()[1:]

                if request_path.find('/') == -1 and len(request_path) and request_path in servicelist:

                    routekey = "A:Queue:%s" % request_path
                    if request_path in self._config:
                        routekey = self._config[request_path]['Consumer_Queue_Name']

                    if len(content) == 0:
                        content_json = dict()
                    else:
                        content_json = json.loads(content)

                    # Tag the payload so the consumer can route the response back.
                    content_json['sockid'] = seqid.__str__()
                    content_json['from'] = selfqueuename
                    self._redis.lpush(routekey, json.dumps(content_json))
                else:
                    # Unknown/invalid path: reply with an error payload and
                    # drop the tracked socket.
                    ret = dict()
                    ret['error_code'] = '40004'
                    sockobj.sendall('HTTP/1.1 200 OK\n\n%s' % (json.dumps(ret)))
                    sockobj.close()
                    self._response_socket_dict.pop(seqid.__str__())
                    continue

            except Exception as e:
                logger.error("PublishThread %s except raised : %s " % (
                    e.__class__, e.args))
Example #29
0
class HTTPSession(base_object.BaseObject):
    """Builds an outgoing HTTP request and incrementally parses its response.

    The request line and a fixed set of headers are assembled by
    ``get_request``; response bytes are fed through ``write_response``, which
    forwards any body fragments to the configured writer and marks the
    session closeable once the message is complete.
    """

    # Class-level defaults; the mutable ones are replaced per instance in
    # __init__ / the setters below.
    _http_header = ""
    _method = ""
    _version = ""
    _req_obj = ""
    _user_agent = "User-Agent: COS-598C-Project-Client\r\n"
    _accept = "Accept: */*\r\n"
    _accept_enc = "Accept-Encoding: *\r\n"
    _accept_charset = "Accept-Charset: *\r\n"
    _host = ""
    _writer = ""
    _closeable = False
    _http_parser = ""
    _nr_bytes = 0

    def __init__(self, method, req_obj, version):
        self._method = method
        self._req_obj = req_obj
        self._version = version
        self._http_parser = HttpParser()

    def _build_first_line(self):
        # e.g. "GET /index.html HTTP/1.1\r\n"
        return "%s %s %s\r\n" % (self._method, self._req_obj, self._version)

    def set_host(self, host):
        self._host = "Host: %s\r\n" % host

    def set_writer(self, writer):
        self._writer = writer

    def write_response(self, data):
        """Feed received bytes to the parser; return total bytes seen so far."""
        parser = self._http_parser
        recved = len(data)
        nparsed = parser.execute(data, recved)
        assert nparsed == recved
        self._nr_bytes += recved
        if parser.is_partial_body():
            self._writer.write(str(parser.recv_body()))
        if parser.is_message_complete():
            self._closeable = True
        return self._nr_bytes

    def get_response_headers(self):
        # Returns None until the parser has seen the complete header section.
        if self._http_parser.is_headers_complete():
            return self._http_parser.get_headers()

    def closeable(self):
        return self._closeable

    def set_port(self, port):
        # Port selection is not supported by this session; intentionally a no-op.
        return

    def get_request(self):
        """Assemble and cache the full request header block."""
        self._http_header = "".join([
            self._build_first_line(),
            self._host,
            self._user_agent,
            self._accept,
            self._accept_enc,
            self._accept_charset,
            "\r\n",
        ])
        return self._http_header
Example #30
0
async def fetch(
    url: str,
    method: str = "GET",
    headers=None,
    body: Optional[bytes] = None,
    connect_timeout=DEFAULT_CONNECT_TIMEOUT,
    request_timeout=DEFAULT_REQUEST_TIMEOUT,
    resolver=resolve,
    max_buffer_size=DEFAULT_BUFFER_SIZE,
    follow_redirects: bool = False,
    max_redirects=DEFAULT_MAX_REDIRECTS,
    validate_cert=config.http_client.validate_certs,
    allow_proxy: bool = False,
    proxies=None,
    user: Optional[str] = None,
    password: Optional[str] = None,
    content_encoding: Optional[str] = None,
    eof_mark: Optional[bytes] = None,
) -> Tuple[int, Dict[str, Any], bytes]:
    """Perform an async HTTP(S) request and return ``(code, headers, body)``.

    :param url: Fetch URL
    :param method: request method "GET", "POST", "PUT" etc
    :param headers: Dict of additional headers
    :param body: Request body for POST and PUT request
    :param connect_timeout:
    :param request_timeout:
    :param resolver:
    :param follow_redirects:
    :param max_redirects:
    :param validate_cert:
    :param allow_proxy:
    :param proxies:
    :param user:
    :param password:
    :param max_buffer_size:
    :param content_encoding:
    :param eof_mark: Do not consider connection reset as error if
      eof_mark received (string or list)
    :return: code, headers, body
    """
    def get_connect_options():
        # Build TLS context for direct HTTPS connections (the proxy CONNECT
        # tunnel is established in plain text first).
        opts = {}
        if use_tls and not proxy:
            ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
            if validate_cert:
                ctx.check_hostname = True
                ctx.verify_mode = ssl.CERT_REQUIRED
            else:
                ctx.check_hostname = False
                ctx.verify_mode = ssl.CERT_NONE
            opts["ssl"] = ctx
        return opts

    metrics["httpclient_requests", ("method", method.lower())] += 1
    #
    if eof_mark:
        eof_mark = smart_bytes(eof_mark)
    # Detect proxy when necessary
    u = urlparse(str(url))
    use_tls = u.scheme == "https"
    proto = "HTTPS" if use_tls else "HTTP"
    logger.debug("%s %s %s", proto, method, url)
    if ":" in u.netloc:
        # Fix: limit to one split so an unexpected extra colon in netloc
        # cannot raise ValueError on unpacking.
        host, port = u.netloc.rsplit(":", 1)
        port = int(port)
    else:
        host = u.netloc
        port = DEFAULT_PORTS.get(u.scheme)
        if not port:
            return ERR_TIMEOUT, {}, b"Cannot resolve port for scheme: %s" % smart_bytes(
                u.scheme)
    if is_ipv4(host):
        addr = host
    else:
        addr = await resolver(host)
    if not addr:
        # Fix: return bytes like every other error path (signature declares bytes).
        return ERR_TIMEOUT, {}, b"Cannot resolve host: %s" % smart_bytes(host)
    # Detect proxy server
    if allow_proxy:
        proxy = (proxies or SYSTEM_PROXIES).get(u.scheme)
    else:
        proxy = None
    # Connect
    reader, writer = None, None
    if proxy:
        connect_address = proxy
    elif isinstance(addr, tuple):
        connect_address = addr
    else:
        connect_address = (addr, port)
    try:
        try:
            if proxy:
                logger.debug("Connecting to proxy %s:%s", connect_address[0],
                             connect_address[1])
            reader, writer = await asyncio.wait_for(
                asyncio.open_connection(connect_address[0], connect_address[1],
                                        **get_connect_options()),
                connect_timeout,
            )
        except ConnectionRefusedError:
            metrics["httpclient_timeouts"] += 1
            return ERR_TIMEOUT, {}, b"Connection refused"
        except OSError as e:
            metrics["httpclient_timeouts"] += 1
            return ERR_TIMEOUT, {}, b"Connection error: %s" % smart_bytes(e)
        except asyncio.TimeoutError:
            metrics["httpclient_timeouts"] += 1
            return ERR_TIMEOUT, {}, b"Connection timed out"
        # Proxy CONNECT
        if proxy:
            logger.debug("Sending CONNECT %s:%s", addr, port)
            # Send CONNECT request
            req = b"CONNECT %s:%s HTTP/1.1\r\nUser-Agent: %s\r\n\r\n" % (
                smart_bytes(addr),
                smart_bytes(port),
                smart_bytes(DEFAULT_USER_AGENT),
            )
            writer.write(smart_bytes(req))
            try:
                await asyncio.wait_for(writer.drain(), request_timeout)
            except asyncio.TimeoutError:
                metrics["httpclient_proxy_timeouts"] += 1
                return ERR_TIMEOUT, {}, b"Timed out while sending request to proxy"
            # Wait for proxy response
            parser = HttpParser()
            while not parser.is_headers_complete():
                try:
                    data = await asyncio.wait_for(reader.read(max_buffer_size),
                                                  request_timeout)
                except asyncio.TimeoutError:
                    metrics["httpclient_proxy_timeouts"] += 1
                    return ERR_TIMEOUT, {}, b"Timed out while sending request to proxy"
                received = len(data)
                parsed = parser.execute(data, received)
                if parsed != received:
                    return ERR_PARSE_ERROR, {}, b"Parse error"
            code = parser.get_status_code()
            logger.debug("Proxy response: %s", code)
            if not 200 <= code <= 299:
                # Fix: return bytes like every other error path.
                return code, parser.get_headers(), b"Proxy error: %d" % code
        # Process request
        body = body or ""
        content_type = "application/binary"
        if not isinstance(body, (str, bytes)):
            # Non-str/bytes body is serialized as JSON.
            body = smart_text(orjson.dumps(body))
            content_type = "text/json"
        body = smart_bytes(body)  # Here and below body is binary
        h = {
            "Host": str(u.netloc),
            "Connection": "close",
            "User-Agent": DEFAULT_USER_AGENT
        }
        if body and content_encoding:
            if content_encoding == CE_DEFLATE:
                # Deflate compression
                h["Content-Encoding"] = CE_DEFLATE
                compress = zlib.compressobj(
                    zlib.Z_DEFAULT_COMPRESSION,
                    zlib.DEFLATED,
                    -zlib.MAX_WBITS,
                    zlib.DEF_MEM_LEVEL,
                    zlib.Z_DEFAULT_STRATEGY,
                )
                body = compress.compress(body) + compress.flush()
            elif content_encoding == CE_GZIP:
                # gzip compression: raw deflate stream wrapped with a gzip
                # header/trailer (mtime, CRC32, original length) by hand.
                h["Content-Encoding"] = CE_GZIP
                compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS,
                                            zlib.DEF_MEM_LEVEL, 0)
                crc = zlib.crc32(body, 0) & 0xFFFFFFFF
                body = b"\x1f\x8b\x08\x00%s\x02\xff%s%s%s%s" % (
                    to32u(int(time.time())),
                    compress.compress(body),
                    compress.flush(),
                    to32u(crc),
                    to32u(len(body)),
                )
        if method in REQUIRE_LENGTH_METHODS:
            h["Content-Length"] = str(len(body))
            h["Content-Type"] = content_type
        if user and password:
            # Include basic auth header
            uh = smart_text("%s:%s" % (user, password))
            h["Authorization"] = b"Basic %s" % codecs.encode(
                uh.encode("utf-8"), "base64").strip()
        if headers:
            h.update(headers)
        path = u.path
        if u.query:
            path += "?%s" % u.query
        req = b"%s %s HTTP/1.1\r\n%s\r\n\r\n%s" % (
            smart_bytes(method),
            smart_bytes(path),
            b"\r\n".join(b"%s: %s" % (smart_bytes(k), smart_bytes(h[k]))
                         for k in h),
            body,
        )
        try:
            writer.write(req)
            await asyncio.wait_for(writer.drain(), request_timeout)
        except ConnectionResetError:
            metrics["httpclient_timeouts"] += 1
            return ERR_TIMEOUT, {}, b"Connection reset while sending request"
        except asyncio.TimeoutError:
            metrics["httpclient_timeouts"] += 1
            return ERR_TIMEOUT, {}, b"Timed out while sending request"
        parser = HttpParser()
        response_body: List[bytes] = []
        while not parser.is_message_complete():
            try:
                data = await asyncio.wait_for(reader.read(max_buffer_size),
                                              request_timeout)
                is_eof = not data
            except (asyncio.IncompleteReadError, ConnectionResetError):
                is_eof = True
            except asyncio.TimeoutError:
                metrics["httpclient_timeouts"] += 1
                return ERR_READ_TIMEOUT, {}, b"Request timed out"
            if is_eof:
                if eof_mark and response_body:
                    # Check if EOF mark is in received data
                    response_body = [b"".join(response_body)]
                    # NOTE(review): eof_mark was passed through smart_bytes()
                    # above, so this str branch looks unreachable unless
                    # smart_bytes can return str — confirm before removing.
                    if isinstance(eof_mark, str):
                        if eof_mark in response_body[0]:
                            break
                    else:
                        found = False
                        for m in eof_mark:
                            if m in response_body[0]:
                                found = True
                                break
                        if found:
                            break
                metrics["httpclient_timeouts"] += 1
                return ERR_READ_TIMEOUT, {}, b"Connection reset"
            received = len(data)
            parsed = parser.execute(data, received)
            if parsed != received:
                return ERR_PARSE_ERROR, {}, b"Parse error"
            if parser.is_partial_body():
                response_body += [parser.recv_body()]
        code = parser.get_status_code()
        parsed_headers = parser.get_headers()
        logger.debug("HTTP Response %s", code)
        if 300 <= code <= 399 and follow_redirects:
            # Process redirects
            if max_redirects > 0:
                new_url = parsed_headers.get("Location")
                if not new_url:
                    return ERR_PARSE_ERROR, {}, b"No Location header"
                logger.debug("HTTP redirect %s %s", code, new_url)
                return await fetch(
                    new_url,
                    method="GET",
                    headers=headers,
                    connect_timeout=connect_timeout,
                    request_timeout=request_timeout,
                    resolver=resolver,
                    max_buffer_size=max_buffer_size,
                    follow_redirects=follow_redirects,
                    max_redirects=max_redirects - 1,
                    validate_cert=validate_cert,
                    allow_proxy=allow_proxy,
                    proxies=proxies,
                )
            else:
                return 404, {}, b"Redirect limit exceeded"
        # @todo: Process gzip and deflate Content-Encoding
        return code, parsed_headers, b"".join(response_body)
    finally:
        # Always release the connection, tolerating a peer reset during close.
        if writer:
            writer.close()
            try:
                await writer.wait_closed()
            except ConnectionResetError:
                pass